diff --git a/.github/workflows/quicktest-dev-pr.yml b/.github/workflows/quicktest-dev-pr.yml
new file mode 100644
index 0000000000000000000000000000000000000000..fff5b9618de9c2e223c86bc9add2cf3990c5fb78
--- /dev/null
+++ b/.github/workflows/quicktest-dev-pr.yml
@@ -0,0 +1,23 @@
+name: QuicktestPRAgainstDev
+
+on:
+  pull_request:
+    branches: [ dev ]
+  push:
+    branches: [ dev ]
+
+
+jobs:
+
+  test:
+    name: Run quicktest on PR branch
+    runs-on: ubuntu-16.04
+
+    steps:
+      - name: checkout
+        uses: actions/checkout@v2
+
+      - name: DockerRunQuicktest
+        run: |
+          docker build -t finn_gha -f docker/Dockerfile.finn_ci --build-arg BUILD_PATH=/tmp/finn_gha .
+          docker run --init --hostname finn_gha -v $(pwd):/workspace/finn -e FINN_INST_NAME=finn_gha finn_gha quicktest.sh
diff --git a/.gitignore b/.gitignore
index 0411de3941d790fd1668fe2328b248cd3c09be08..0c1bbd84fe24be46446a7d714dd708d601813e53 100644
--- a/.gitignore
+++ b/.gitignore
@@ -76,13 +76,18 @@ MANIFEST
 # Per-project virtualenvs
 .venv*/
 
-# Cloned dependencies for Docker
-/brevitas/
-/brevitas_cnv_lfc/
-/cnpy/
-/finn-hlslib/
-/pyverilator/
-/PYNQ-HelloWorld/
-
 # Jenkins cfg dir
 /docker/jenkins_home
+
+# SSH key dir mounted into Docker
+/ssh_keys/
+
+# PYNQ board files
+/board_files/
+
+# datasets for testing
+/dataset/
+/data/
+
+# Google Drive key for dashboard
+/gdrive-key/
diff --git a/AUTHORS.rst b/AUTHORS.rst
index e231e61d38991e11e2e43a7c9a3a78c50c878244..eb1e06e54b7eb6deedd3e7f8392bb3aa257e7dc6 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -6,3 +6,5 @@ Contributors
 * Jakoba Petri-Koenig (@auphelia)
 * Andrea Rigoni (@AndreaRigoni)
 * Hendrik Borras (@HenniOVP)
+* Lucian Petrica (@quetric)
+* Tobias Alonso (@Tobi-Alonso)
diff --git a/README.md b/README.md
index 8e122fdad693d90cbabb738cb6da81d31a82dfd3..6d485627a322c7b192c1e9ad5a1058487952b11a 100644
--- a/README.md
+++ b/README.md
@@ -23,6 +23,7 @@ Please see the [Getting Started](https://finn.readthedocs.io/en/latest/getting_s
 
 ## What's New in FINN?
 
+* **2020-09-21:** v0.4b (beta) is released. Read more on the <a href="https://xilinx.github.io/finn/2020/09/21/finn-v04b-beta-is-released.html">release blog post</a>.
 * **2020-05-08:** v0.3b (beta) is released, with initial support for convolutions, parallel transformations, more flexible memory allocation for MVAUs, throughput testing and many other smaller improvements and bugfixes. Read more on the <a href="https://xilinx.github.io/finn/2020/05/08/finn-v03b-beta-is-released.html">release blog post</a>.
 * **2020-04-15:** FINN v0.2.1b (beta): use fixed commit versions for dependency repos, otherwise identical to 0.2b
 * **2020-02-28:** FINN v0.2b (beta) is released, which is a clean-slate reimplementation of the framework. Currently only fully-connected networks are supported for the end-to-end flow. Please see the release blog post for a summary of the key features.
diff --git a/docker/Dockerfile.finn_ci b/docker/Dockerfile.finn_ci
index fb257b05c7c5e63922fe9c51241c18ab671ec0ba..fac168d55edd565b1cf84c4d9b556c51feb4e526 100644
--- a/docker/Dockerfile.finn_ci
+++ b/docker/Dockerfile.finn_ci
@@ -30,44 +30,42 @@ FROM pytorch/pytorch:1.1.0-cuda10.0-cudnn7.5-devel
 MAINTAINER Yaman Umuroglu <yamanu@xilinx.com>
 ARG PYTHON_VERSION=3.6
 ARG BUILD_PATH
-ARG FINN_CI_BRANCH
 
 WORKDIR /workspace
 
 RUN apt-get update
 RUN apt-get -y upgrade
 RUN apt-get install -y build-essential libglib2.0-0 libsm6 libxext6 libxrender-dev
-RUN apt install verilator
-RUN apt-get -y install sshpass
+RUN apt-get install -y verilator zsh
+RUN apt-get -y install sshpass wget unzip
 RUN echo "StrictHostKeyChecking no" >> /etc/ssh/ssh_config
 
 # cloning dependency repos
 # Brevitas
 RUN git clone https://github.com/Xilinx/brevitas.git /workspace/brevitas
-# Brevitas examples
-RUN git clone https://github.com/maltanar/brevitas_cnv_lfc.git /workspace/brevitas_cnv_lfc
 # CNPY
 RUN git clone https://github.com/rogersce/cnpy.git /workspace/cnpy
 # FINN hlslib
-RUN git clone https://github.com/maltanar/finn-hlslib.git /workspace/finn-hlslib
+RUN git clone https://github.com/Xilinx/finn-hlslib.git /workspace/finn-hlslib
 # PyVerilator
 RUN git clone https://github.com/maltanar/pyverilator /workspace/pyverilator
-# PYNQ-HelloWorld
-RUN git clone https://github.com/maltanar/PYNQ-HelloWorld.git /workspace/PYNQ-HelloWorld
+# oh-my-xilinx
+RUN git clone https://bitbucket.org/maltanar/oh-my-xilinx.git /workspace/oh-my-xilinx
 
-# checkout desired FINN branch for testing
-RUN git clone --branch $FINN_CI_BRANCH https://github.com/Xilinx/finn /workspace/finn
-
-RUN pip install -r /workspace/finn/requirements.txt
+COPY requirements.txt .
+RUN pip install -r requirements.txt
+RUN rm requirements.txt
 RUN apt update; apt install nano
 RUN pip install pytest-dependency
+RUN pip install pytest-xdist
+RUN pip install pytest-parallel
+RUN pip install -e git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading
 
 ENV PYTHONPATH "${PYTHONPATH}:/workspace/finn/src"
-ENV PYTHONPATH "${PYTHONPATH}:/workspace/brevitas_cnv_lfc/training_scripts"
-ENV PYTHONPATH "${PYTHONPATH}:/workspace/brevitas"
 ENV PYTHONPATH "${PYTHONPATH}:/workspace/pyverilator"
-ENV PYNQSHELL_PATH "/workspace/PYNQ-HelloWorld/boards"
 ENV VIVADO_IP_CACHE "$BUILD_PATH/vivado_ip_cache"
+ENV PATH "${PATH}:/workspace/oh-my-xilinx"
+ENV OHMYXILINX "/workspace/oh-my-xilinx"
 
 # colorful terminal output
 RUN echo "PS1='\[\033[1;36m\]\u\[\033[1;31m\]@\[\033[1;32m\]\h:\[\033[1;35m\]\w\[\033[1;31m\]\$\[\033[0m\] '" >>  /root/.bashrc
@@ -76,7 +74,9 @@ RUN mkdir -p $VIVADO_IP_CACHE
 
 WORKDIR /workspace/finn
 
-COPY finn_entrypoint.sh /usr/local/bin/
+COPY docker/finn_entrypoint.sh /usr/local/bin/
+COPY docker/quicktest.sh /usr/local/bin/
 RUN chmod 755 /usr/local/bin/finn_entrypoint.sh
+RUN chmod 755 /usr/local/bin/quicktest.sh
 ENTRYPOINT ["finn_entrypoint.sh"]
 CMD ["bash"]
diff --git a/docker/Dockerfile.finn_dev b/docker/Dockerfile.finn_dev
index f2a09aa579f0a15d468c4f350e58e2a4f5e3947a..4be975442665c680ccbb8aefd1d0bb8d07b81cc9 100644
--- a/docker/Dockerfile.finn_dev
+++ b/docker/Dockerfile.finn_dev
@@ -37,28 +37,28 @@ ARG PASSWD
 ARG JUPYTER_PORT
 ARG NETRON_PORT
 
-EXPOSE $JUPYTER_PORT
-EXPOSE $NETRON_PORT
-
 WORKDIR /workspace
 
 RUN apt-get update
 RUN apt-get -y upgrade
 RUN apt-get install -y build-essential libglib2.0-0 libsm6 libxext6 libxrender-dev
-RUN apt-get install verilator
-RUN apt-get install nano
-RUN apt-get -y install sshpass
+RUN apt-get install -y verilator nano zsh rsync
+RUN apt-get -y install sshpass wget unzip
 RUN echo "StrictHostKeyChecking no" >> /etc/ssh/ssh_config
 
 COPY requirements.txt .
 RUN pip install -r requirements.txt
 RUN rm requirements.txt
-RUN pip install jupyter
+RUN pip install pygments==2.4.1
+RUN pip install jupyter==1.0.0
+RUN pip install matplotlib==3.3.1 --ignore-installed
+RUN pip install pytest-dependency==0.5.1
+RUN pip install sphinx==3.1.2
+RUN pip install sphinx_rtd_theme==0.5.0
+RUN pip install pytest-xdist==2.0.0
+RUN pip install pytest-parallel==0.1.0
 RUN pip install netron
-RUN pip install matplotlib --ignore-installed
-RUN pip install pytest-dependency
-RUN pip install sphinx
-RUN pip install sphinx_rtd_theme
+RUN pip install -e git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading
 
 # switch user
 RUN groupadd -g $GID $GNAME
@@ -73,24 +73,21 @@ USER $UNAME
 # cloning dependency repos (as user)
 # Brevitas
 RUN git clone https://github.com/Xilinx/brevitas.git /workspace/brevitas
-# Brevitas examples
-RUN git clone https://github.com/maltanar/brevitas_cnv_lfc.git /workspace/brevitas_cnv_lfc
 # CNPY
 RUN git clone https://github.com/rogersce/cnpy.git /workspace/cnpy
 # FINN hlslib
-RUN git clone https://github.com/maltanar/finn-hlslib.git /workspace/finn-hlslib
+RUN git clone https://github.com/Xilinx/finn-hlslib.git /workspace/finn-hlslib
 # PyVerilator
 RUN git clone https://github.com/maltanar/pyverilator /workspace/pyverilator
-# PYNQ-HelloWorld
-RUN git clone https://github.com/maltanar/PYNQ-HelloWorld.git /workspace/PYNQ-HelloWorld
+# oh-my-xilinx
+RUN git clone https://bitbucket.org/maltanar/oh-my-xilinx.git /workspace/oh-my-xilinx
 
 # for this developer-oriented Docker container we assume the FINN repo is cloned and mounted from the host
 # at /workspace/finn -- see run-docker.sh for an example of how to do this.
 ENV PYTHONPATH "${PYTHONPATH}:/workspace/finn/src"
-ENV PYTHONPATH "${PYTHONPATH}:/workspace/brevitas_cnv_lfc/training_scripts"
-ENV PYTHONPATH "${PYTHONPATH}:/workspace/brevitas"
 ENV PYTHONPATH "${PYTHONPATH}:/workspace/pyverilator"
-ENV PYNQSHELL_PATH "/workspace/PYNQ-HelloWorld/boards"
+ENV PATH "${PATH}:/workspace/oh-my-xilinx:/home/$UNAME/.local/bin"
+ENV OHMYXILINX "/workspace/oh-my-xilinx"
 
 WORKDIR /home/$UNAME/finn
 RUN echo "PS1='\[\033[1;36m\]\u\[\033[1;31m\]@\[\033[1;32m\]\h:\[\033[1;35m\]\w\[\033[1;31m\]\$\[\033[0m\] '" >>  /home/$UNAME/.bashrc
@@ -99,8 +96,13 @@ RUN echo "source \$VIVADO_PATH/settings64.sh" >> /home/$UNAME/.bashrc
 # copy entrypoint script
 USER root
 COPY docker/finn_entrypoint.sh /usr/local/bin/
+COPY docker/quicktest.sh /usr/local/bin/
 RUN chmod 755 /usr/local/bin/finn_entrypoint.sh
+RUN chmod 755 /usr/local/bin/quicktest.sh
 USER $UNAME
 
+EXPOSE $JUPYTER_PORT
+EXPOSE $NETRON_PORT
+
 ENTRYPOINT ["finn_entrypoint.sh"]
 CMD ["bash"]
diff --git a/docker/Jenkinsfile b/docker/Jenkinsfile
index 80be261fb3da057186259598f84d915176577a5d..b2d3102bd4aa3c00620f41c102af5a8b385cede7 100644
--- a/docker/Jenkinsfile
+++ b/docker/Jenkinsfile
@@ -9,12 +9,19 @@ pipeline {
         string(name: 'PYNQ_PASSWORD', defaultValue: 'xilinx', description: 'PYNQ board password')
         string(name: 'PYNQ_TARGET_DIR', defaultValue: '/home/xilinx/finn', description: 'PYNQ board target deployment directory')
         string(name: 'NUM_DEFAULT_WORKERS', defaultValue: '1', description: 'Number of cores for parallel transformations')
-        string(name: 'DOCKER_CMD', defaultValue: """python setup.py test""", description: 'Command to run')
+        // main test: everything except rtlsim and end2end tests, parallel run with xdist, no parallel transformations to save on memory
+        string(name: 'DOCKER_CMD_MAIN', defaultValue: """python setup.py test --addopts "-k 'not (rtlsim or end2end)' --dist=loadfile -n auto" """, description: 'Main test command')
+        // rtlsim tests: parallel run with pytest-parallel, no parallel transformations to save on memory
+        string(name: 'DOCKER_CMD_RTLSIM', defaultValue: """python setup.py test --addopts "-k rtlsim --workers auto" """, description: 'rtlsim test command')
+        // end2end tests: no parallel testing, use NUM_DEFAULT_WORKERS for parallel transformations
+        string(name: 'DOCKER_CMD_END2END', defaultValue: """python setup.py test --addopts "-k end2end" """, description: 'end2end test command')
+        // allow specifying where to mount the cloned folder from, since Jenkins and FINN may be running in separate containers
+        string(name: 'WORKSPACE_MOUNT', defaultValue: '/var/jenkins_home/workspace/finn', description: 'Path to Jenkins workspace mount')
     }
     environment {
         DOCKER_TAG='finn_ci:$BUILD_ID'
-        DOCKER_INST_NAME='finn_ci_$BUILD_ID'
-        BUILD_PATH='/tmp/finn_ci_$BUILD_ID'
+        DOCKER_INST_NAME='finn_ci'
+        BUILD_PATH='/tmp/finn_ci'
     }
     stages {
         stage("Clone") {
@@ -27,16 +34,57 @@ pipeline {
                 sh """
                 docker build -t $DOCKER_TAG -f docker/Dockerfile.finn_ci \
                 --build-arg BUILD_PATH=$BUILD_PATH \
-                --build-arg FINN_CI_BRANCH=${params.FINN_CI_BRANCH} \
-                docker/
+                .
                 """
             }
         }
-        stage('Test') {
+        stage('test-main') {
             steps {
+                catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
                 sh """
-                docker run --name $DOCKER_INST_NAME --init \
+                docker run --init \
                 --hostname $DOCKER_INST_NAME \
+                -v ${params.WORKSPACE_MOUNT}:/workspace/finn \
+                -v ${params.VIVADO_PATH}:${params.VIVADO_PATH}:ro \
+                -e NUM_DEFAULT_WORKERS=1 \
+                -e FINN_INST_NAME=$DOCKER_INST_NAME \
+                -e VIVADO_PATH=${params.VIVADO_PATH} \
+                -e PYNQ_BOARD=${params.PYNQ_BOARD} \
+                -e PYNQ_IP=${params.PYNQ_IP} \
+                -e PYNQ_USERNAME=${params.PYNQ_USERNAME} \
+                -e PYNQ_PASSWORD=${params.PYNQ_PASSWORD} \
+                -e PYNQ_TARGET_DIR=${params.PYNQ_TARGET_DIR} \
+                $DOCKER_TAG ${params.DOCKER_CMD_MAIN}
+                """}
+            }
+        }
+        stage('test-rtlsim') {
+            steps {
+                catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
+                sh """
+                docker run --init \
+                --hostname $DOCKER_INST_NAME \
+                -v ${params.WORKSPACE_MOUNT}:/workspace/finn \
+                -v ${params.VIVADO_PATH}:${params.VIVADO_PATH}:ro \
+                -e NUM_DEFAULT_WORKERS=1 \
+                -e FINN_INST_NAME=$DOCKER_INST_NAME \
+                -e VIVADO_PATH=${params.VIVADO_PATH} \
+                -e PYNQ_BOARD=${params.PYNQ_BOARD} \
+                -e PYNQ_IP=${params.PYNQ_IP} \
+                -e PYNQ_USERNAME=${params.PYNQ_USERNAME} \
+                -e PYNQ_PASSWORD=${params.PYNQ_PASSWORD} \
+                -e PYNQ_TARGET_DIR=${params.PYNQ_TARGET_DIR} \
+                $DOCKER_TAG ${params.DOCKER_CMD_RTLSIM}
+                """}
+            }
+        }
+        stage('test-end2end') {
+            steps {
+                catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
+                sh """
+                docker run --init \
+                --hostname $DOCKER_INST_NAME \
+                -v ${params.WORKSPACE_MOUNT}:/workspace/finn \
                 -v ${params.VIVADO_PATH}:${params.VIVADO_PATH}:ro \
                 -e NUM_DEFAULT_WORKERS=${params.NUM_DEFAULT_WORKERS} \
                 -e FINN_INST_NAME=$DOCKER_INST_NAME \
@@ -46,8 +94,8 @@ pipeline {
                 -e PYNQ_USERNAME=${params.PYNQ_USERNAME} \
                 -e PYNQ_PASSWORD=${params.PYNQ_PASSWORD} \
                 -e PYNQ_TARGET_DIR=${params.PYNQ_TARGET_DIR} \
-                $DOCKER_TAG ${params.DOCKER_CMD}
-                """
+                $DOCKER_TAG ${params.DOCKER_CMD_END2END}
+                """ }
             }
         }
     }
diff --git a/docker/finn_entrypoint.sh b/docker/finn_entrypoint.sh
index 035bba3b53d85a8457eff1e7c1a23e0efff60caa..b750d3007d07f4cc0d2b08ef59dcd87b3d90cb61 100644
--- a/docker/finn_entrypoint.sh
+++ b/docker/finn_entrypoint.sh
@@ -1,6 +1,5 @@
 #!/bin/bash
 
-export XILINX_VIVADO=$VIVADO_PATH
 export SHELL=/bin/bash
 export FINN_ROOT=/workspace/finn
 
@@ -13,23 +12,18 @@ gecho () {
 
 # checkout the correct dependency repo commits
 # the repos themselves are cloned in the Dockerfile
-BREVITAS_COMMIT=215cf44c76d562339fca368c8c3afee3110033e8
-BREVITAS_EXAMPLES_COMMIT=2059f96bd576bf71f32c757e7f92617a70190c90
+BREVITAS_COMMIT=6ffefa8dbf37fdb0f44c994f34604c29fadb16b0
 CNPY_COMMIT=4e8810b1a8637695171ed346ce68f6984e585ef4
-HLSLIB_COMMIT=6b88db826bb023937506913a23d964775a7606af
-PYVERILATOR_COMMIT=fb1afefa5b207acf6fec28f8abb72a862f2ca1d2
-PYNQSHELL_COMMIT=0c82a61b0ec1a07fa275a14146233824ded7a13d
-
+HLSLIB_COMMIT=cfafe11a93b79ab1af7529d68f08886913a6466e
+PYVERILATOR_COMMIT=c97a5ba41bbc7c419d6f25c74cdf3bdc3393174f
+OMX_COMMIT=1bae737669901e762f581af73348332b5c4b2ada
 
 gecho "Setting up known-good commit versions for FINN dependencies"
 # Brevitas
 gecho "brevitas @ $BREVITAS_COMMIT"
 git -C /workspace/brevitas pull --quiet
 git -C /workspace/brevitas checkout $BREVITAS_COMMIT --quiet
-# Brevitas examples
-gecho "brevitas_cnv_lfc @ $BREVITAS_EXAMPLES_COMMIT"
-git -C /workspace/brevitas_cnv_lfc pull --quiet
-git -C /workspace/brevitas_cnv_lfc checkout $BREVITAS_EXAMPLES_COMMIT --quiet
+pip install --user -e /workspace/brevitas
 # CNPY
 gecho "cnpy @ $CNPY_COMMIT"
 git -C /workspace/cnpy pull --quiet
@@ -42,12 +36,48 @@ git -C /workspace/finn-hlslib checkout $HLSLIB_COMMIT --quiet
 gecho "PyVerilator @ $PYVERILATOR_COMMIT"
 git -C /workspace/pyverilator pull --quiet
 git -C /workspace/pyverilator checkout $PYVERILATOR_COMMIT --quiet
-# PYNQ-HelloWorld
-gecho "PYNQ shell @ $PYNQSHELL_COMMIT"
-git -C /workspace/PYNQ-HelloWorld pull --quiet
-git -C /workspace/PYNQ-HelloWorld checkout $PYNQSHELL_COMMIT --quiet
+# oh-my-xilinx
+gecho "oh-my-xilinx @ $OMX_COMMIT"
+git -C /workspace/oh-my-xilinx pull --quiet
+git -C /workspace/oh-my-xilinx checkout $OMX_COMMIT --quiet
 
-# source Vivado env.vars
-source $VIVADO_PATH/settings64.sh
+if [ ! -z "$VIVADO_PATH" ];then
+  # source Vivado env.vars
+  export XILINX_VIVADO=$VIVADO_PATH
+  source $VIVADO_PATH/settings64.sh
+fi
+if [ ! -z "$VITIS_PATH" ];then
+  # source Vitis env.vars
+  export XILINX_VITIS=$VITIS_PATH
+  source $VITIS_PATH/settings64.sh
+fi
+if [ ! -z "$XILINX_XRT" ];then
+  # TODO install XRT dependencies?
+  # wget https://raw.githubusercontent.com/Xilinx/XRT/master/src/runtime_src/tools/scripts/xrtdeps.sh
+  # apt-get update
+  # bash xrtdeps.sh
+  # rm xrtdeps.sh
+  # source XRT
+  source $XILINX_XRT/setup.sh
+fi
 
+# download PYNQ board files if not already there
+if [ ! -d "/workspace/finn/board_files" ]; then
+    gecho "Downloading PYNQ board files for Vivado"
+    wget -q https://github.com/cathalmccabe/pynq-z1_board_files/raw/master/pynq-z1.zip
+    wget -q https://d2m32eurp10079.cloudfront.net/Download/pynq-z2.zip
+    unzip -q pynq-z1.zip
+    unzip -q pynq-z2.zip
+    mkdir /workspace/finn/board_files
+    mv pynq-z1/ board_files/
+    mv pynq-z2/ board_files/
+    rm pynq-z1.zip
+    rm pynq-z2.zip
+fi
+if [ ! -d "/workspace/finn/board_files/ultra96v1" ]; then
+    gecho "Downloading Avnet BDF files into board_files"
+    git clone https://github.com/Avnet/bdf.git
+    mv /workspace/finn/bdf/* /workspace/finn/board_files/
+    rm -rf /workspace/finn/bdf
+fi
 exec "$@"
diff --git a/docker/quicktest.sh b/docker/quicktest.sh
new file mode 100755
index 0000000000000000000000000000000000000000..b06feccdc578a59c8ef00531871e1211c2a407e5
--- /dev/null
+++ b/docker/quicktest.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
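+# Convenience wrapper around the FINN test suite, meant to be run inside the
+# FINN Docker container. Usage: quicktest.sh [main|rtlsim|end2end|full]; with no
+# argument, only the quick tests (not marked vivado/slow/vitis) are run.
+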
+: ${PYTEST_PARALLEL=auto}
+
+cd $FINN_ROOT
+# check if command line argument is empty or not present
+if [ -z "$1" ]; then
+  echo "Running quicktest: not (vivado or slow or vitis) with pytest-xdist"
+  python setup.py test --addopts "-m 'not (vivado or slow or vitis)' --dist=loadfile -n $PYTEST_PARALLEL"
+elif [ "$1" = "main" ]; then
+  echo "Running main test suite: not (rtlsim or end2end) with pytest-xdist"
+  python setup.py test --addopts "-k 'not (rtlsim or end2end)' --dist=loadfile -n $PYTEST_PARALLEL"
+elif [ "$1" = "rtlsim" ]; then
+  echo "Running rtlsim test suite with pytest-parallel"
+  python setup.py test --addopts "-k rtlsim --workers $PYTEST_PARALLEL"
+elif [ "$1" = "end2end" ]; then
+  echo "Running end2end test suite with no parallelism"
+  python setup.py test --addopts "-k end2end"
+elif [ "$1" = "full" ]; then
+  echo "Running full test suite, each step with appropriate parallelism"
+  $0 main;
+  $0 rtlsim;
+  $0 end2end;
+else
+  echo "Unrecognized argument to quicktest.sh"
+fi
diff --git a/docs/_posts/2020-09-21-finn-v04b-beta-is-released.md b/docs/_posts/2020-09-21-finn-v04b-beta-is-released.md
new file mode 100644
index 0000000000000000000000000000000000000000..957d7ed5354ac32957ac7892435475586e61fa65
--- /dev/null
+++ b/docs/_posts/2020-09-21-finn-v04b-beta-is-released.md
@@ -0,0 +1,53 @@
+---
+layout: post
+title:  "FINN v0.4b (beta) is released"
+author: "Yaman Umuroglu"
+---
+
+Version v0.4b (beta) of the FINN compiler is now available. As with the previous
+release, a whole lot of new features and bugfixes have gone in,
+but here are some highlights:
+
+<img src="https://mma.prnewswire.com/media/752936/ALVEO_PRESS.jpg" width="300" align="center"/>
+
+**Build support for Alveo/Vitis + more Zynq variants.** We now have a
+`VitisBuild` transformation to provide a FINN flow that goes all the way to
+bitfiles targeting Xilinx Alveo platforms. This transformation takes care of
+FIFO, datawidth converter and DMA engine insertion so you can simply give it a
+FINN model with HLS layers and let it run.
+We've also simplified the Zynq build flow with `ZynqBuild` to provide a
+similar experience, which should now be able to support most Zynq and Zynq
+UltraScale+ platforms.
+You can read more about the new hardware build transformations
+<a href="https://finn.readthedocs.io/en/latest/hw_build.html">here</a>.
+
+
+**Fully-accelerated end-to-end examples + dashboard.** The FINN end-to-end example networks
+are now fully accelerated on the FPGA, allowing raw images to be directly fed in
+and top-1 indices to be retrieved.
+We now also have a <a href="https://bit.ly/finn-end2end-dashboard">dashboard</a>
+which gets automatically updated with the latest build results from end-to-end
+examples, including FPGA resources and performance.
+This also enables running full-performance accuracy validation on hardware,
+which is now incorporated into the <a href="https://github.com/Xilinx/finn/blob/master/notebooks/end2end_example/tfc_end2end_example.ipynb#validation">end-to-end notebooks</a>.
+
+<img src="https://xilinx.github.io/finn/finn/img/finn-brevitas-debug.png" width="300" align="center"/>
+
+**Brevitas-FINN co-debug support.** We can now export graphs from Brevitas with special DebugMarker nodes (like above) and PyTorch forward hooks to compare intermediate activations between the Brevitas version and the FINN-ONNX exported version. This is especially handy for debugging larger networks when they don't export correctly. <a href="https://github.com/Xilinx/finn/blob/dev/tests/brevitas/test_brevitas_debug.py">Here</a> is an example of how to use this.
+
+<img src="https://xilinx.github.io/finn/finn/img/accumulator-minimization.png" width="300" align="center"/>
+
+**Accumulator minimization.** When converting to HLS layers, FINN will now automatically try to pick a minimal bitwidth for each accumulator, based on the precision and size of the dot product it accumulates over. Whereas accumulators were previously fixed at a bitwidth such as
+32 bits, the new approach can save significant resources by picking e.g. 10-bit accumulators (as per above) where possible. We've also expanded the range of DataTypes available in FINN to cover everything between 1 and 32 bits to provide more flexibility.
+
+**New layers and cycle estimation.** We've been working on supporting more of the finn-hlslib layers in FINN and
+<a href="https://github.com/Xilinx/finn/tree/dev/src/finn/custom_op/fpgadataflow">the list</a>  has expanded significantly.
+Many of these layers (and their accompanying conversion transformations) will be utilized for new FINN end-to-end example networks,
+such as MobileNet-v1, ResNet-50 and QuartzNet, over the course of the next few releases. These layers also support <a href="https://github.com/Xilinx/finn/blob/dev/src/finn/analysis/fpgadataflow/exp_cycles_per_layer.py">clock cycle estimation</a>
+based on workload and parallelization parameters, allowing the user to estimate performance without having to go to synthesis.
+
+The release (tagged 0.4b) is now available on GitHub.
+We're continuously working to improve FINN in terms of layer, network and
+infrastructure support.
+If you'd like to help out, please check out the <a href="https://github.com/Xilinx/finn/blob/master/CONTRIBUTING.md">contribution guidelines</a> and
+share your ideas on the <a href="https://gitter.im/xilinx-finn/community">FINN Gitter channel</a>!
diff --git a/docs/finn-sheduling-and-folding.pptx b/docs/finn-sheduling-and-folding.pptx
new file mode 100644
index 0000000000000000000000000000000000000000..30bbe4d55b1cda9df25a791227983dc7cb750e58
Binary files /dev/null and b/docs/finn-sheduling-and-folding.pptx differ
diff --git a/docs/finn/brevitas_export.rst b/docs/finn/brevitas_export.rst
index 443b692a2d05b48b2e395373411c3d5382825c6c..83684ae092609ef0f83a5525508febf4676b2d7a 100644
--- a/docs/finn/brevitas_export.rst
+++ b/docs/finn/brevitas_export.rst
@@ -10,7 +10,7 @@ Brevitas Export
    :scale: 70%
    :align: center
 
-FINN expects an ONNX model as input. This can be a model trained with `Brevitas <https://github.com/Xilinx/brevitas>`_. Brevitas is a PyTorch library for quantization-aware training and the FINN Docker image comes with several `example Brevitas networks <https://github.com/maltanar/brevitas_cnv_lfc>`_. Brevitas provides an export of a quantized network in ONNX representation. The resulting model consists only of `ONNX standard nodes <https://github.com/onnx/onnx/blob/master/docs/Operators.md>`_, but also contains additional attributes for the ONNX nodes to represent low precision datatypes. To work with the model it is wrapped into :ref:`modelwrapper` provided by FINN. 
+FINN expects an ONNX model as input. This can be a model trained with `Brevitas <https://github.com/Xilinx/brevitas>`_. Brevitas is a PyTorch library for quantization-aware training and the FINN Docker image comes with several `example Brevitas networks <https://github.com/Xilinx/brevitas/tree/master/brevitas_examples/bnn_pynq>`_. Brevitas provides an export of a quantized network in ONNX representation. The resulting model consists only of `ONNX standard nodes <https://github.com/onnx/onnx/blob/master/docs/Operators.md>`_, but also contains additional attributes for the ONNX nodes to represent low precision datatypes. To work with the model it is wrapped into :ref:`modelwrapper` provided by FINN.
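+
+As a quick illustration (a minimal sketch, assuming a trained Brevitas model ``model`` and the Brevitas FINN-ONNX export API available at the time of this release; the input shape and file name below are placeholders):
+
+::
+
+  import brevitas.onnx as bo
+  from finn.core.modelwrapper import ModelWrapper
+
+  # export the quantized network to FINN-ONNX; the (1, 1, 28, 28) shape is an
+  # assumption for an MNIST-style network and should match your model
+  bo.export_finn_onnx(model, (1, 1, 28, 28), "model.onnx")
+
+  # wrap the exported model for use with the FINN compiler
+  finn_model = ModelWrapper("model.onnx")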
 
 At this stage we can already use the functional verification flow to simulate the model using Python, this is marked in the graphic with the dotted arrow. For more details please have look at :ref:`verification`.
 
diff --git a/docs/finn/end_to_end_flow.rst b/docs/finn/end_to_end_flow.rst
index fd6960a2f94774ab783cf1cff220ac604d9fbe92..529df57a5efe636cdd508efc3297bd05013a6b71 100644
--- a/docs/finn/end_to_end_flow.rst
+++ b/docs/finn/end_to_end_flow.rst
@@ -20,6 +20,5 @@ For a more detailed overview about the different flow sections, please have a lo
 
    brevitas_export
    nw_prep
-   vivado_synth
-   pynq_deploy
+   hw_build
    verification
diff --git a/docs/finn/example_networks.rst b/docs/finn/example_networks.rst
index 47c9a976cb14a3e175dff6800ad8a5da60b44ecb..eeead2d3138d8d3200011108a6f7260f23e4ef36 100644
--- a/docs/finn/example_networks.rst
+++ b/docs/finn/example_networks.rst
@@ -4,33 +4,23 @@
 Example Networks
 ****************
 
-FINN uses `several pre-trained QNNs <https://github.com/maltanar/brevitas_cnv_lfc>`_ that serve as examples and testcases.
-You can find a status summary below for each network.
+FINN uses `several pre-trained QNNs <https://github.com/Xilinx/brevitas/tree/master/brevitas_examples/bnn_pynq>`_
+that serve as examples and testcases.
 
 * TFC, SFC, LFC... are fully-connected networks trained on the MNIST dataset
 * CNV is a convolutional network trained on the CIFAR-10 dataset
 * w\_a\_ refers to the quantization used for the weights (w) and activations (a) in bits
 
-The rows in the table are different steps of the FINN end-to-end flow.
-If a particular network is supported for a particular step in the current FINN
-version, this is indicated by an x mark in the table.
+These networks are built end-to-end as part of the `FINN integration tests <https://github.com/Xilinx/finn/blob/master/tests/end2end/test_end2end_bnn_pynq.py>`_,
+and the key performance indicators (FPGA resources, frames per second...) are
+automatically posted to the dashboard below.
+To implement a new network, you can use the
+`integration test code <https://github.com/Xilinx/finn/blob/dev/tests/end2end/test_end2end_bnn_pynq.py>`_
+as a starting point, as well as the relevant
+`Jupyter notebooks <https://github.com/Xilinx/finn/tree/master/notebooks/end2end_example>`_.
 
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| FINN step             | Basic test | TFC-w1a1 | TFC-w1a2 | TFC-w2a2 | CNV-w1a1 | CNV-w1a2 | CNV-w2a2 |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| Export/Import         | x          | x        | x        | x        | x        |    x     |     x    |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| Streamlining          | x          | x        | x        | x        | x        |          |          |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| Convert to HLS layers | x          | x        | x        | x        | x        |          |          |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| Stitched IP           | x          | x        | x        | x        | x        |          |          |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| Hardware test         | x          | x        | x        |          | x        |          |          |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| cppsim                | x          | x        | x        | x        | x        |          |          |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| rtlsim node-by-node   | x          | x        | x        | x        | x        |          |          |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
-| rtlsim stitched IP    | x          | x        | x        | x        | x        |          |          |
-+-----------------------+------------+----------+----------+----------+----------+----------+----------+
+<a href="https://bit.ly/finn-end2end-dashboard">
+  <img src="https://firebasestorage.googleapis.com/v0/b/drive-assets.google.com.a.appspot.com/o/Asset%20-%20Drive%20Icon512.png?alt=media" width="50" align="center" />
+  FINN end-to-end dashboard on Google Drive
+</a>
diff --git a/docs/finn/getting_started.rst b/docs/finn/getting_started.rst
index e2b56b7f75a489f4e4f08bccdc3b889822f81838..3bfaf2e1a7f5cbbeb8760ad7c51bd941338f6360 100644
--- a/docs/finn/getting_started.rst
+++ b/docs/finn/getting_started.rst
@@ -13,11 +13,14 @@ The FINN compiler should not be thought of a single pushbutton tool that does ev
 Requirements
 ============
 
-* Ubuntu 18.04
+* Ubuntu 18.04 with ``bash`` installed
 * Docker
-* A working Vivado 2019.1 installation
-* A `VIVADO_PATH` environment variable pointing to the Vivado installation directory (e.g. the directory where settings64.sh is located)
+* A working Vivado 2019.1 or 2020.1 installation
+* A ``VIVADO_PATH`` environment variable pointing to the Vivado installation directory (e.g. the directory where settings64.sh is located)
 * (optional) A PYNQ board with a network connection
+   * the ``bitstring`` package must be installed on the PYNQ: ``sudo pip3 install bitstring``
+* (optional) An Alveo board, and a working Vitis 2020.1 installation if you want to use Vitis and Alveo (see `Alveo first-time setup`_ below)
+
 
 Running FINN in Docker
 ======================
@@ -25,11 +28,14 @@ We use Docker extensively for developing and deploying FINN. If you are not fami
 
 Getting an interactive shell for development or experimentation
 ***************************************************************
+.. note:: **run-docker.sh requires bash to execute correctly.**
+
 ::
 
-  sh run_docker.sh
+  ./run-docker.sh
 
 Simply running ./run-docker.sh without any additional arguments will clone the dependency repos, create a Docker container and give you a terminal which you can use for development and experimentation.
+If you want a new terminal on an already-running container, you can do this with ``docker exec -it finn_dev_<username> bash``.
 
 .. warning:: The Docker container is spawned with the `--rm` option, so make sure that any important files you created inside the container are either in the /workspace/finn folder (which is mounted from the host computer) or otherwise backed up.
 
@@ -39,7 +45,7 @@ Running the Jupyter notebooks
 *****************************
 ::
 
-  sh run-docker.sh notebook
+  ./run-docker.sh notebook
 
 This will launch the `Jupyter notebook <https://jupyter.org/>`_ server inside a Docker container, and print a link on the terminal that you can open in your browser to run the FINN notebooks or create new ones.
 .. note:: The link will look something like this (the token you get will be different):
@@ -49,11 +55,34 @@ The run-docker.sh script forwards ports 8888 for Jupyter and 8081 for Netron, an
 
 Running the test suite directly
 *******************************
+FINN comes with a set of tests to check for regressions. The full test suite
+(which will take several hours to run and require a PYNQ board) can be executed
+by:
+
+::
+
+  ./run-docker.sh test
+
+There is a quicker variant of the test suite that skips the tests marked as
+requiring Vivado or as slow-running tests:
+
+::
+
+  ./run-docker.sh quicktest
+
+If you want to run individual tests, you can do this *inside the Docker container
+from the FINN root directory* as follows:
+
 ::
 
-  sh run-docker.sh test
+  python setup.py test --addopts "-k test_end2end_tfc_w1a2"
 
-FINN comes with a set of tests which can be launched using the command above. Note that some of the tests involve extra compilation and the entire test suite may take some time to complete.
+Finally, if you want to run tests in parallel (e.g. to take advantage of a multi-core CPU)
+you can use:
+
+ * pytest-parallel for any rtlsim tests, e.g. ``python setup.py test --addopts "-k rtlsim --workers auto"``
+ * pytest-xdist for anything else; make sure to add ``--dist=loadfile`` if tests in the same file depend on each other, e.g. ``python setup.py test --addopts "-k mytest -n auto --dist=loadfile"``
+
+Please see the pytest documentation for more about picking tests by marks or by name.
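+
+For example (a sketch; the ``mvau`` name filter below is just an illustrative substring,
+while ``vivado`` is one of the markers used by the FINN test suite):
+
+::
+
+  # run only tests whose name matches a substring, in parallel with pytest-xdist
+  python setup.py test --addopts "-k mvau -n auto --dist=loadfile"
+  # run only tests carrying a particular mark, e.g. all Vivado-requiring tests
+  python setup.py test --addopts "-m vivado"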
 
 Environment variables
 **********************
@@ -61,11 +90,41 @@ Environment variables
 Prior to running the `run-docker.sh` script, there are several environment variables you can set to configure certain aspects of FINN.
 These are summarized below:
 
-* `VIVADO_PATH` points to your Vivado installation on the host
-* `JUPYTER_PORT` (default 8888) changes the port for Jupyter inside Docker
-* `NETRON_PORT` (default 8081) changes the port for Netron inside Docker
-* `NUM_DEFAULT_WORKERS` (default 1) specifies the degree of parallelization for the transformations that can be run in parallel
-* `PYNQ_BOARD` specifies the type of PYNQ board used (Pynq-Z1, Pynq-Z2, Ultra96, ZCU104) for the test suite
-* `PYNQ_IP` and `PYNQ_PORT` specify ip address and port number to access the PYNQ board
-* `PYNQ_USERNAME` and `PYNQ_PASSWORD` specify the PYNQ board access credentials for the test suite
-* `PYNQ_TARGET_DIR` specifies the target dir on the PYNQ board for the test suite
+* ``VIVADO_PATH`` points to your Vivado installation on the host
+* (optional, for Vitis & Alveo only) ``VITIS_PATH``, ``PLATFORM_REPO_PATHS`` and ``XILINX_XRT`` respectively point to your Vitis installation, the Vitis platform files, and Xilinx XRT
+* ``JUPYTER_PORT`` (default 8888) changes the port for Jupyter inside Docker
+* ``NETRON_PORT`` (default 8081) changes the port for Netron inside Docker
+* ``NUM_DEFAULT_WORKERS`` (default 1) specifies the degree of parallelization for the transformations that can be run in parallel
+* ``PYNQ_BOARD`` or ``ALVEO_BOARD`` specifies the type of PYNQ/Alveo board used (see "supported hardware" below) for the test suite
+* ``PYNQ_IP`` and ``PYNQ_PORT`` (or ``ALVEO_IP`` and ``ALVEO_PORT``) specify the IP address and port number used to access the PYNQ board / Alveo target
+* ``PYNQ_USERNAME`` and ``PYNQ_PASSWORD`` (or ``ALVEO_USERNAME`` and ``ALVEO_PASSWORD``) specify the PYNQ board / Alveo host access credentials for the test suite. For PYNQ, password is always needed to run as sudo. For Alveo, you can leave the password empty and place your ssh private key in the ``finn/ssh_keys`` folder to use keypair authentication.
+* ``PYNQ_TARGET_DIR`` (or ``ALVEO_TARGET_DIR``) specifies the target dir on the PYNQ board / Alveo host for the test suite
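+
+For example, a hypothetical host-side setup for running the test suite against a
+PYNQ-Z1 board might look like this before invoking ``run-docker.sh`` (all values
+below are placeholders for your own installation and board):
+
+::
+
+  export VIVADO_PATH=/opt/Xilinx/Vivado/2019.1
+  export PYNQ_BOARD=Pynq-Z1
+  export PYNQ_IP=192.168.2.99
+  export PYNQ_USERNAME=xilinx
+  export PYNQ_PASSWORD=xilinx
+  export PYNQ_TARGET_DIR=/home/xilinx/finn
+  ./run-docker.sh quicktest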
+
+Supported Hardware
+===================
+**End-to-end support including driver:** For quick deployment, FINN targets boards supported by `PYNQ <https://pynq.io/>`_. For these platforms, we can build a full bitfile including DMAs to move data into and out of the FINN-generated accelerator, as well as a Python driver to launch the accelerator. We support the Pynq-Z1, Pynq-Z2, Ultra96, ZCU102 and ZCU104 boards.
+As of FINN v0.4b we also have preliminary support for Xilinx Alveo boards using PYNQ and Vitis, see instructions below for Alveo setup.
+
+**Vivado IPI support for any Xilinx FPGA:** FINN generates a Vivado IP Integrator (IPI) design from the neural network with AXI stream (FIFO) in-out interfaces, which can be integrated onto any Xilinx FPGA as part of a larger system. It's up to you to take the FINN-generated accelerator (what we call "stitched IP" in the tutorials), wire it up to your FPGA design and send/receive neural network data to/from the accelerator.
+
+Alveo first-time setup
+**********************
+We use *host* to refer to the PC running the FINN Docker environment, which will build the accelerator+driver and package it up, and *target* to refer to the PC where the Alveo card is installed. These two can be the same PC, or connected over the network -- FINN includes some utilities to make it easier to test on remote PCs too. Prior to first usage, you need to set up both the host and the target in the following manner:
+
+On the target side:
+
+1. Install Xilinx XRT and set up the ``XILINX_XRT`` environment variable to point to your installation, for instance ``/opt/xilinx/xrt``.
+2. Install the Vitis platform files for Alveo and set up the ``PLATFORM_REPO_PATHS`` environment variable to point to your installation, for instance ``/opt/xilinx/platforms``.
+3. Create a conda environment named *finn-pynq-alveo* by following this guide `to set up PYNQ for Alveo <https://pynq.readthedocs.io/en/latest/getting_started/alveo_getting_started.html>`_. It's best to follow the recommended environment.yml (set of package versions) in this guide.
+4. Activate the environment with ``conda activate finn-pynq-alveo`` and install the bitstring package with ``pip install bitstring``
+5. Done! You should now be able to e.g. ``import pynq`` in Python scripts.
+6. (optional) If you don't want to specify the ``ALVEO_PASSWORD`` environment variable, you can `set up public key authentication <https://www.digitalocean.com/community/tutorials/how-to-configure-ssh-key-based-authentication-on-a-linux-server>`_. Copy your private key to the ``finn/ssh_keys`` folder on the host to get password-less deployment and remote execution.
+
+
+On the host side:
+
+1. Install Vitis 2020.1 and set up the ``VITIS_PATH`` environment variable to point to your installation.
+2. Install Xilinx XRT and set up the ``XILINX_XRT`` environment variable to point to your installation. *This must be the same path as the target's XRT (target step 1)*
+3. Install the Vitis platform files for Alveo and set up the ``PLATFORM_REPO_PATHS`` environment variable to point to your installation. *This must be the same path as the target's platform files (target step 2)*
+4. Set up the ``ALVEO_*`` environment variables accordingly for your target, see description of environment variables above.
+5. Done! You can try the ``test_end2end_vitis`` tests in the FINN Docker to verify your setup, although this will take some time.
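+
+A hypothetical host-side environment for such a remote Alveo setup could then look
+like this (all paths, addresses and names below are placeholders):
+
+::
+
+  export VITIS_PATH=/opt/Xilinx/Vitis/2020.1
+  export XILINX_XRT=/opt/xilinx/xrt
+  export PLATFORM_REPO_PATHS=/opt/xilinx/platforms
+  export ALVEO_IP=192.168.2.100
+  export ALVEO_USERNAME=alveo_user
+  export ALVEO_TARGET_DIR=/home/alveo_user/finn
+  ./run-docker.sh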
diff --git a/docs/finn/hw_build.rst b/docs/finn/hw_build.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9f292a91c352d3c0d4f7adb340c85f15c1b52a53
--- /dev/null
+++ b/docs/finn/hw_build.rst
@@ -0,0 +1,100 @@
+.. _hw_build:
+
+*****************************
+Hardware Build and Deployment
+*****************************
+
+.. image:: /img/finn-hw-build.png
+   :scale: 70%
+   :align: center
+
+A model where all layers have been converted to HLS layers can be processed by
+FINN to build a bitfile targeting either a Zynq or Alveo system.
+
+Hardware Build
+==============
+
+Internally, the hardware build consists of the following steps:
+
+1. Driver generation
+2. DMA and DWC node insertion
+3. Partitioning for floorplanning
+4. FIFO insertion and IP generation
+5. Vivado/Vitis project generation and synthesis
+
+.. note:: In previous FINN releases it was necessary to step through the
+   individual sub-steps of the hardware build manually by calling each transformation.
+   The hardware build transformations ``ZynqBuild`` and ``VitisBuild`` now execute all
+   necessary sub-transformations. For more control over the build process, the
+   transformations listed below can still be called individually.
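+
+As a rough sketch (assuming a ``ModelWrapper`` instance ``model`` whose layers have
+already been converted to HLS layers, and that ``ZynqBuild`` takes the target board
+and clock period as shown; check the transformation's docstring for the exact
+arguments):
+
+::
+
+  from finn.transformation.fpgadataflow.make_zynq_proj import ZynqBuild
+
+  # run the full Zynq build flow: DMA/DWC insertion, partitioning, FIFO insertion,
+  # IP generation and bitfile synthesis
+  model = model.transform(ZynqBuild(platform="Pynq-Z1", period_ns=10))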
+
+Driver Generation
+------------------
+
+To rapidly test the generated design on PYNQ platforms, FINN is capable of
+generating a Python driver for the given design. This driver packs/unpacks the
+input/output tensors in the expected format, then uses PYNQ APIs to initiate
+data movement and transfer back the results to the host CPU. The generation of
+the driver is done by transformation pass :py:mod:`finn.transformation.fpgadataflow.make_pynq_driver.MakePYNQDriver`.
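+
+A minimal sketch, assuming a ``ModelWrapper`` instance ``model`` that has already been
+through the hardware build, and assuming the transformation's default constructor is
+sufficient:
+
+::
+
+  from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
+
+  # generate the Python driver files alongside the build outputs
+  model = model.transform(MakePYNQDriver())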
+
+DMA and DWC Node Insertion
+---------------------------
+
+At this step, FINN will insert custom hardware-oriented ONNX nodes into the graph.
+These are DMA engines for moving data into and out of the accelerator (from DRAM),
+and data width converters between consecutive nodes where required.
+These steps are accomplished by calling the :py:mod:`finn.transformation.fpgadataflow.insert_iodma.InsertIODMA`
+and :py:mod:`finn.transformation.fpgadataflow.insert_dwc.InsertDWC` transformations,
+respectively.
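+
+A minimal sketch, assuming a ``ModelWrapper`` instance ``model`` and that both
+transformations can be constructed with their default arguments:
+
+::
+
+  from finn.transformation.fpgadataflow.insert_iodma import InsertIODMA
+  from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
+
+  # add DMA engines at the graph inputs/outputs, then data width converters
+  # between mismatched streaming interfaces
+  model = model.transform(InsertIODMA())
+  model = model.transform(InsertDWC())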
+
+Partitioning for Floorplanning
+-------------------------------
+
+FINN will now partition the graph into several StreamingDataflowPartitions.
+This capability mostly serves to facilitate floorplanning in future FINN releases
+and does not alter the functioning of the model itself. Each DMA node will be
+placed into its own partition. If no partition number attributes are specified,
+all the regular network nodes will become a single partition.
+This is accomplished by the :py:mod:`finn.transformation.fpgadataflow.floorplan.Floorplan`
+and :py:mod:`finn.transformation.fpgadataflow.create_dataflow_partition.CreateDataflowPartition`
+transformations.
+
+.. note:: For Vitis, each partition is compiled as a separate kernel, and the
+   kernels are linked together afterwards. For Zynq, each partition will become
+   an IP block.
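+
+A minimal sketch, assuming a ``ModelWrapper`` instance ``model`` and default
+constructor arguments for both transformations:
+
+::
+
+  from finn.transformation.fpgadataflow.floorplan import Floorplan
+  from finn.transformation.fpgadataflow.create_dataflow_partition import CreateDataflowPartition
+
+  # assign partition attributes, then split the graph into StreamingDataflowPartitions
+  model = model.transform(Floorplan())
+  model = model.transform(CreateDataflowPartition())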
+
+
+FIFO Insertion and IP Generation
+---------------------------------
+
+FINN will descend into each partition and insert FIFO nodes between streaming nodes,
+with FIFO depths dictated by the node attributes, using the :py:mod:`finn.transformation.fpgadataflow.insert_fifo.InsertFIFO`
+transformation.
+Afterwards, IP blocks will be created for each partition, which in turn contain the
+IP blocks for each layer stitched together. The layer-level IP blocks
+are generated by Vivado HLS, using a sequence of :py:mod:`finn.transformation.fpgadataflow.prepare_ip.PrepareIP`
+and :py:mod:`finn.transformation.fpgadataflow.hlssynth_ip.HLSSynthIP` transformations.
+The top-level IP blocks are generated in Vivado IPI, using the :py:mod:`finn.transformation.fpgadataflow.create_stitched_ip.CreateStitchedIP` transformation.
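+
+A minimal sketch of the individual steps, assuming a ``ModelWrapper`` instance
+``model``, a target ``fpga_part`` string and a clock period ``clk_ns`` in
+nanoseconds (the constructor arguments shown are assumptions; check each
+transformation's docstring):
+
+::
+
+  from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO
+  from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+  from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+  from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
+
+  model = model.transform(InsertFIFO())
+  model = model.transform(PrepareIP(fpga_part, clk_ns))
+  model = model.transform(HLSSynthIP())
+  model = model.transform(CreateStitchedIP(fpga_part, clk_ns))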
+
+Vivado/Vitis Project Generation and Synthesis
+---------------------------------------------
+
+The final step in the hardware build flow is to generate a Vivado (for Zynq) or Vitis (for Alveo)
+project, and run synthesis to generate a bitfile. This is done using the ``MakeZYNQProject``
+transformation for Zynq, and the ``VitisLink`` transformation for Alveo.
+
+
+Deployment
+==========
+
+
+Deployment and Remote Execution
+-------------------------------
+
+The bitfile and the driver file(s) are copied to the PYNQ board and can be executed there using the *onnx_exec* function with the right *exec_mode* settings. For details please have a look at transformation :py:mod:`finn.transformation.fpgadataflow.make_deployment.DeployToPYNQ` and the execution function :py:mod:`finn.core.onnx_exec`.
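+
+A rough sketch of remote execution, assuming a built ``model``, a reachable PYNQ board,
+an input NumPy array ``input_tensor`` matching the model input, and that ``DeployToPYNQ``
+takes the connection details shown (all values are placeholders):
+
+::
+
+  from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
+  from finn.core.onnx_exec import execute_onnx
+
+  # copy bitfile + driver to the board and record the deployment info in the model
+  model = model.transform(DeployToPYNQ("192.168.2.99", "22", "xilinx", "xilinx", "/home/xilinx/finn"))
+
+  # run one input tensor remotely on the board and collect the output
+  input_dict = {model.graph.input[0].name: input_tensor}
+  output_dict = execute_onnx(model, input_dict)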
+
+Throughput Test
+---------------
+
+FINN also offers the possibility to measure the network performance directly on the PYNQ board. This can be done by using :py:mod:`finn.core.throughput_test`. When running this function the metrics of the network are returned as a dictionary.
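+
+A minimal sketch, assuming the model has been deployed as above and that the module
+exposes a ``throughput_test(model)`` function:
+
+::
+
+  from finn.core.throughput_test import throughput_test
+
+  # returns a dictionary of metrics, e.g. runtime and throughput
+  res = throughput_test(model)
+  print(res)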
diff --git a/docs/finn/img/finn-hw-build.png b/docs/finn/img/finn-hw-build.png
new file mode 100644
index 0000000000000000000000000000000000000000..f3a591fa8f9e25f99b44d2bd9502bf3ae979818b
Binary files /dev/null and b/docs/finn/img/finn-hw-build.png differ
diff --git a/docs/finn/internals.rst b/docs/finn/internals.rst
index 7a4bc687eeb827320991f7d3f1ef8cc35e97f3da..dee62f09a9253380e05300dac8fa34915c20dab5 100644
--- a/docs/finn/internals.rst
+++ b/docs/finn/internals.rst
@@ -16,6 +16,10 @@ Custom Quantization Annotations
 
 ONNX does not support datatypes smaller than 8-bit integers, whereas in FINN we are interested in smaller integers down to ternary and bipolar. To make this work, FINN uses the quantization_annotation field in ONNX to annotate tensors with their FINN DataType (:py:mod:`finn.core.datatype.DataType`) information. However, all tensors are expected to use single-precision floating point (float32) storage in FINN. This means we store even a 1-bit value as floating point for the purposes of representation. The FINN compiler flow is responsible for eventually producing a packed representation for the target hardware, where the 1-bit is actually stored as 1-bit.
 
+Note that FINN uses floating point tensors as a carrier data type to represent integers. Floating point arithmetic can introduce rounding errors, e.g. (int_num * float_scale) / float_scale is not always equal to int_num.
+When using the custom ONNX execution flow, FINN will attempt to sanitize any rounding errors for integer tensors. See (:py:mod:`finn.util.basic.sanitize_quant_values`) for more information.
+This behavior can be disabled (not recommended!) by setting the environment variable ``SANITIZE_QUANT_TENSORS=0``.
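+
+As an illustration of the idea only (not the actual implementation in
+:py:mod:`finn.util.basic.sanitize_quant_values`), sanitizing amounts to snapping values
+that should be integers back onto the nearest integer when they only deviate by a tiny
+rounding error:
+
+::
+
+  import numpy as np
+
+  def sanitize_sketch(values, eps=1e-5):
+      # snap float-carried integers back to exact integers if they are
+      # within eps of an integer, leave other values untouched
+      rounded = np.round(values)
+      close = np.isclose(values, rounded, atol=eps)
+      return np.where(close, rounded, values)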
+
 Custom Operations/Nodes
 =======================
 
diff --git a/docs/finn/pynq_deploy.rst b/docs/finn/pynq_deploy.rst
deleted file mode 100644
index ef36da9e7f8ceee19abfd061fde10bf06d3afb62..0000000000000000000000000000000000000000
--- a/docs/finn/pynq_deploy.rst
+++ /dev/null
@@ -1,41 +0,0 @@
-.. _pynq_deploy:
-
-***************
-PYNQ Deployment
-***************
-
-.. note:: **This website is currently under construction.**
-
-.. image:: /img/pynq-deploy.png
-   :scale: 70%
-   :align: center
-
-This chapter is about the hardware generation and deployment on PYNQ. If you need more information about PYNQ, please have a look at the `PYNQ website <https://pynq.readthedocs.io/en/v2.5.1/>`_.
-
-Create PYNQ Shell Project
-=========================
-
-To deploy the network on A PYNQ platform, it needs to be put inside an appropriate *shell*. This *shell* bridges the network with the interfaces the underlying system exposes. This can be done using the transformation MakePYNQProject, see :py:mod:`finn.transformation.fpgadataflow.make_pynq_proj.MakePYNQProject`.
-
-Test on Hardware
-================
-
-Synthesis, Place and Route
---------------------------
-
-After integrating the model into the PYNQ shell, Vivado *Synthesis, Place and Route* can be launched. The result is a bitfile which can be used for the PYNQ board. In FINN this can be done using a transformation pass. For details, please have a look at :py:mod:`finn.transformation.fpgadataflow.synth_pynq_proj.SynthPYNQProject`.
-
-Generate PYNQ runtime code
---------------------------
-
-Additionally, a Python code is necessary to execute the model on the board. This is done by transformation pass :py:mod:`finn.transformation.fpgadataflow.make_pynq_driver.MakePYNQDriver`.
-
-Deployment and Remote Execution
--------------------------------
-
-The bitfile and the driver file(s) are copied to the PYNQ board and can be executed there using the *onnx_exec* function with the right *exec_mode* settings. For details please have a look at transformation :py:mod:`finn.transformation.fpgadataflow.make_deployment.DeployToPYNQ` and the execution function :py:mod:`finn.core.onnx_exec`.
-
-Throughput Test
----------------
-
-Finn also offers the possibility to measure the network performance directly on the PYNQ board. This can be done by using :py:mod:`finn.core.throughput_test`. When running this function the metrics of the network are returned as dictionary.
diff --git a/docs/finn/verification.rst b/docs/finn/verification.rst
index 391c6f999312839daca0d4161336c7c0ae822f89..c52c0840aa40566d930164490b1fd249d7c07757 100644
--- a/docs/finn/verification.rst
+++ b/docs/finn/verification.rst
@@ -28,4 +28,15 @@ This simulation can be used for a model containing several HLS custom operations
 Emulation using PyVerilator
 ===========================
 
-The emulation using PyVerilator can be used when IP blocks were generated, either node by node or of a whole design. For that purpose PyVerilator gets the generated verilog files.
+The emulation using PyVerilator can be used when IP blocks were generated, either node by node or for a whole (IP-stitched) design. For that purpose, PyVerilator is given the generated Verilog files.
+
+For debugging purposes, it's possible to generate .vcd trace files that show the value of external & internal signals as the emulation is running. To enable this:
+
+ - for node-by-node rtlsim, set the ``rtlsim_trace`` attribute of each node of interest to either a file name for the vcd or ``default`` to use the node name as the filename.
+ - for IP-stitched rtlsim, set the ``rtlsim_trace`` metadata_prop for the graph as per above.
+
+To control the tracing depth in the module hierarchy, use the ``RTLSIM_TRACE_DEPTH`` environment variable (default is 1):
+
+ - level 1 shows top-level input/output streams
+ - level 2 shows per-layer input/output streams
+ - level 3 shows per full-layer I/O including FIFO count signals
+
+Note that deeper tracing will take longer to execute and may produce very large .vcd files.
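+
+For example (a sketch, assuming a ``ModelWrapper`` instance ``model`` and the usual FINN
+custom-op wrapper API):
+
+::
+
+  from finn.custom_op.registry import getCustomOp
+
+  # node-by-node rtlsim: dump a trace for one node of interest
+  getCustomOp(model.graph.node[0]).set_nodeattr("rtlsim_trace", "default")
+
+  # IP-stitched rtlsim: dump a trace for the whole stitched design
+  model.set_metadata_prop("rtlsim_trace", "whole_design.vcd")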
diff --git a/docs/img/accumulator-minimization.png b/docs/img/accumulator-minimization.png
new file mode 100644
index 0000000000000000000000000000000000000000..76f81bf9192f0fbc894f89c6c4673c9542f65817
Binary files /dev/null and b/docs/img/accumulator-minimization.png differ
diff --git a/docs/img/finn-brevitas-debug.png b/docs/img/finn-brevitas-debug.png
new file mode 100644
index 0000000000000000000000000000000000000000..fdc260412fb5066e3993abd94c6160456b3c5f1b
Binary files /dev/null and b/docs/img/finn-brevitas-debug.png differ
diff --git a/finn-rtllib/memstream/component.xml b/finn-rtllib/memstream/component.xml
index 6b728c0555a4889b8e76d5759233d1109a3002bd..14af1610385a735ad987fd1055ff9f90f76e4a23 100644
--- a/finn-rtllib/memstream/component.xml
+++ b/finn-rtllib/memstream/component.xml
@@ -260,7 +260,7 @@
         <spirit:parameters>
           <spirit:parameter>
             <spirit:name>viewChecksum</spirit:name>
-            <spirit:value>ba6d3300</spirit:value>
+            <spirit:value>3c30c4ac</spirit:value>
           </spirit:parameter>
         </spirit:parameters>
       </spirit:view>
@@ -276,7 +276,7 @@
         <spirit:parameters>
           <spirit:parameter>
             <spirit:name>viewChecksum</spirit:name>
-            <spirit:value>54f61a0e</spirit:value>
+            <spirit:value>00b5320e</spirit:value>
           </spirit:parameter>
         </spirit:parameters>
       </spirit:view>
@@ -290,7 +290,7 @@
         <spirit:parameters>
           <spirit:parameter>
             <spirit:name>viewChecksum</spirit:name>
-            <spirit:value>92c3ebfc</spirit:value>
+            <spirit:value>d714c73b</spirit:value>
           </spirit:parameter>
         </spirit:parameters>
       </spirit:view>
@@ -780,6 +780,11 @@
         <spirit:displayName>Mem Init</spirit:displayName>
         <spirit:value spirit:resolve="generated" spirit:id="MODELPARAM_VALUE.MEM_INIT">./</spirit:value>
       </spirit:modelParameter>
+      <spirit:modelParameter spirit:dataType="string">
+        <spirit:name>RAM_STYLE</spirit:name>
+        <spirit:displayName>Ram Style</spirit:displayName>
+        <spirit:value spirit:resolve="generated" spirit:id="MODELPARAM_VALUE.RAM_STYLE">auto</spirit:value>
+      </spirit:modelParameter>
       <spirit:modelParameter spirit:dataType="integer">
         <spirit:name>STRM0_WIDTH</spirit:name>
         <spirit:displayName>Strm0 Width</spirit:displayName>
@@ -873,6 +878,12 @@
     </spirit:modelParameters>
   </spirit:model>
   <spirit:choices>
+    <spirit:choice>
+      <spirit:name>choice_list_44c459b8</spirit:name>
+      <spirit:enumeration>auto</spirit:enumeration>
+      <spirit:enumeration>block</spirit:enumeration>
+      <spirit:enumeration>distributed</spirit:enumeration>
+    </spirit:choice>
     <spirit:choice>
       <spirit:name>choice_list_9d8b0d81</spirit:name>
       <spirit:enumeration>ACTIVE_HIGH</spirit:enumeration>
@@ -891,9 +902,26 @@
         <spirit:fileType>verilogSource</spirit:fileType>
       </spirit:file>
       <spirit:file>
-        <spirit:name>hdl/ramb18.v</spirit:name>
+        <spirit:name>hdl/memstream_singleblock.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:logicalName>xil_defaultlib</spirit:logicalName>
+      </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/memstream_multiblock.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:logicalName>xil_defaultlib</spirit:logicalName>
+      </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/ramb18_wf_dualport.v</spirit:name>
         <spirit:fileType>verilogSource</spirit:fileType>
-        <spirit:userFileType>CHECKSUM_13578c44</spirit:userFileType>
+        <spirit:userFileType>CHECKSUM_9425c051</spirit:userFileType>
+        <spirit:logicalName>xil_defaultlib</spirit:logicalName>
+      </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/ramb18_sdp.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:userFileType>CHECKSUM_9f7c64bf</spirit:userFileType>
+        <spirit:logicalName>xil_defaultlib</spirit:logicalName>
       </spirit:file>
     </spirit:fileSet>
     <spirit:fileSet>
@@ -902,13 +930,43 @@
         <spirit:name>sim/tb_memstream.v</spirit:name>
         <spirit:fileType>verilogSource</spirit:fileType>
       </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/memstream.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:userFileType>USED_IN_ipstatic</spirit:userFileType>
+      </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/mux.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:userFileType>USED_IN_ipstatic</spirit:userFileType>
+      </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/memstream_singleblock.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:userFileType>USED_IN_ipstatic</spirit:userFileType>
+      </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/memstream_multiblock.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:userFileType>USED_IN_ipstatic</spirit:userFileType>
+      </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/ramb18_wf_dualport.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:userFileType>USED_IN_ipstatic</spirit:userFileType>
+      </spirit:file>
+      <spirit:file>
+        <spirit:name>hdl/ramb18_sdp.v</spirit:name>
+        <spirit:fileType>verilogSource</spirit:fileType>
+        <spirit:userFileType>USED_IN_ipstatic</spirit:userFileType>
+      </spirit:file>
     </spirit:fileSet>
     <spirit:fileSet>
       <spirit:name>xilinx_xpgui_view_fileset</spirit:name>
       <spirit:file>
         <spirit:name>xgui/memstream_v1_0.tcl</spirit:name>
         <spirit:fileType>tclSource</spirit:fileType>
-        <spirit:userFileType>CHECKSUM_92c3ebfc</spirit:userFileType>
+        <spirit:userFileType>CHECKSUM_d714c73b</spirit:userFileType>
         <spirit:userFileType>XGUI_VERSION_2</spirit:userFileType>
       </spirit:file>
     </spirit:fileSet>
@@ -1034,49 +1092,36 @@
       <spirit:name>Component_Name</spirit:name>
       <spirit:value spirit:resolve="user" spirit:id="PARAM_VALUE.Component_Name" spirit:order="1">memstream_v1_0</spirit:value>
     </spirit:parameter>
+    <spirit:parameter>
+      <spirit:name>RAM_STYLE</spirit:name>
+      <spirit:displayName>Ram Style</spirit:displayName>
+      <spirit:value spirit:resolve="user" spirit:id="PARAM_VALUE.RAM_STYLE" spirit:choiceRef="choice_list_44c459b8">auto</spirit:value>
+    </spirit:parameter>
   </spirit:parameters>
   <spirit:vendorExtensions>
     <xilinx:coreExtensions>
       <xilinx:supportedFamilies>
         <xilinx:family xilinx:lifeCycle="Production">zynq</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">artix7</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">artix7l</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">kintex7</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">kintex7l</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">kintexu</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">kintexuplus</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">spartan7</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">aartix7</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">aspartan7</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">azynq</xilinx:family>
-        <xilinx:family xilinx:lifeCycle="Beta">zynquplus</xilinx:family>
+        <xilinx:family xilinx:lifeCycle="Production">zynquplus</xilinx:family>
         <xilinx:family xilinx:lifeCycle="Production">virtexuplus</xilinx:family>
+        <xilinx:family xilinx:lifeCycle="Beta">virtexuplusHBM</xilinx:family>
       </xilinx:supportedFamilies>
       <xilinx:taxonomies>
         <xilinx:taxonomy>/UserIP</xilinx:taxonomy>
       </xilinx:taxonomies>
       <xilinx:displayName>memstream_v1_0</xilinx:displayName>
+      <xilinx:autoFamilySupportLevel>level_0</xilinx:autoFamilySupportLevel>
       <xilinx:definitionSource>package_project</xilinx:definitionSource>
-      <xilinx:coreRevision>2</xilinx:coreRevision>
-      <xilinx:coreCreationDateTime>2019-11-04T19:37:20Z</xilinx:coreCreationDateTime>
-      <xilinx:tags>
-        <xilinx:tag xilinx:name="nopcore"/>
-        <xilinx:tag xilinx:name="ui.data.coregen.dd@7a3d79be_ARCHIVE_LOCATION">c:/Users/lucianp/Documents/git/finn-rtllib/memstream</xilinx:tag>
-        <xilinx:tag xilinx:name="ui.data.coregen.dd@6ca546af_ARCHIVE_LOCATION">c:/Users/lucianp/Documents/git/finn-rtllib/memstream</xilinx:tag>
-        <xilinx:tag xilinx:name="ui.data.coregen.dd@2bb0c52f_ARCHIVE_LOCATION">c:/Users/lucianp/Documents/git/finn-rtllib/memstream</xilinx:tag>
-        <xilinx:tag xilinx:name="ui.data.coregen.dd@1f6f8fe4_ARCHIVE_LOCATION">c:/Users/lucianp/Documents/git/finn-rtllib/memstream</xilinx:tag>
-        <xilinx:tag xilinx:name="ui.data.coregen.dd@79ecbc44_ARCHIVE_LOCATION">c:/Users/lucianp/Documents/git/finn-rtllib/memstream</xilinx:tag>
-        <xilinx:tag xilinx:name="ui.data.coregen.dd@22fd683_ARCHIVE_LOCATION">c:/Users/lucianp/Documents/git/finn-rtllib/memstream</xilinx:tag>
-        <xilinx:tag xilinx:name="ui.data.coregen.dd@2c00346d_ARCHIVE_LOCATION">c:/Users/lucianp/Documents/git/finn-rtllib/memstream</xilinx:tag>
-      </xilinx:tags>
+      <xilinx:coreRevision>10</xilinx:coreRevision>
+      <xilinx:coreCreationDateTime>2020-09-17T18:04:10Z</xilinx:coreCreationDateTime>
     </xilinx:coreExtensions>
     <xilinx:packagingInfo>
-      <xilinx:xilinxVersion>2019.1.3</xilinx:xilinxVersion>
+      <xilinx:xilinxVersion>2020.1</xilinx:xilinxVersion>
       <xilinx:checksum xilinx:scope="busInterfaces" xilinx:value="6d8b2551"/>
-      <xilinx:checksum xilinx:scope="fileGroups" xilinx:value="5e0c4694"/>
+      <xilinx:checksum xilinx:scope="fileGroups" xilinx:value="629ffc9d"/>
       <xilinx:checksum xilinx:scope="ports" xilinx:value="cabd7433"/>
-      <xilinx:checksum xilinx:scope="hdlParameters" xilinx:value="f63127c8"/>
-      <xilinx:checksum xilinx:scope="parameters" xilinx:value="5365a08b"/>
+      <xilinx:checksum xilinx:scope="hdlParameters" xilinx:value="29c70cc4"/>
+      <xilinx:checksum xilinx:scope="parameters" xilinx:value="858b58f8"/>
     </xilinx:packagingInfo>
   </spirit:vendorExtensions>
 </spirit:component>
diff --git a/finn-rtllib/memstream/hdl/memstream.v b/finn-rtllib/memstream/hdl/memstream.v
index 28acb301a583f7437c580744bae7bdc4aef76337..961103e4ca1261ab0109ad9db291a1a66f9c0915 100644
--- a/finn-rtllib/memstream/hdl/memstream.v
+++ b/finn-rtllib/memstream/hdl/memstream.v
@@ -109,359 +109,141 @@ module memstream
 
 );
 
-//calculate number of RAMB18 blocks we need depth-wise
-localparam NMEMBLOCKS = (MEM_DEPTH+1023) / 1024; //ceil(MEM_DEPTH/1024)
-
-//calculate width of address for each block
-localparam BLOCKADRWIDTH = NMEMBLOCKS > 1 ? 10 : $clog2(MEM_DEPTH);
-
-//determine whether a stream needs to multiplex between memory blocks
-localparam STRM0_MUX = ((STRM0_OFFSET/1024) != ((STRM0_OFFSET+STRM0_DEPTH)/1024));
-localparam STRM1_MUX = ((STRM1_OFFSET/1024) != ((STRM1_OFFSET+STRM1_DEPTH)/1024));
-localparam STRM2_MUX = ((STRM2_OFFSET/1024) != ((STRM2_OFFSET+STRM2_DEPTH)/1024));
-localparam STRM3_MUX = ((STRM3_OFFSET/1024) != ((STRM3_OFFSET+STRM3_DEPTH)/1024));
-localparam STRM4_MUX = ((STRM4_OFFSET/1024) != ((STRM4_OFFSET+STRM4_DEPTH)/1024));
-localparam STRM5_MUX = ((STRM5_OFFSET/1024) != ((STRM5_OFFSET+STRM5_DEPTH)/1024));
-
-//determine what the base block of each stream is
-localparam STRM0_BLOCK = (STRM0_OFFSET/1024);
-localparam STRM1_BLOCK = (STRM1_OFFSET/1024);
-localparam STRM2_BLOCK = (STRM2_OFFSET/1024);
-localparam STRM3_BLOCK = (STRM3_OFFSET/1024);
-localparam STRM4_BLOCK = (STRM4_OFFSET/1024);
-localparam STRM5_BLOCK = (STRM5_OFFSET/1024);
-
-//determine what the end block of each stream is
-localparam STRM0_END_BLOCK = ((STRM0_OFFSET+STRM0_DEPTH-1)/1024);
-localparam STRM1_END_BLOCK = ((STRM1_OFFSET+STRM1_DEPTH-1)/1024);
-localparam STRM2_END_BLOCK = ((STRM2_OFFSET+STRM2_DEPTH-1)/1024);
-localparam STRM3_END_BLOCK = ((STRM3_OFFSET+STRM3_DEPTH-1)/1024);
-localparam STRM4_END_BLOCK = ((STRM4_OFFSET+STRM4_DEPTH-1)/1024);
-localparam STRM5_END_BLOCK = ((STRM5_OFFSET+STRM5_DEPTH-1)/1024);
-
-//determine the number of blocks spanned by each stream
-localparam STRM0_NBLOCKS = STRM0_END_BLOCK - STRM0_BLOCK + 1;
-localparam STRM1_NBLOCKS = STRM1_END_BLOCK - STRM1_BLOCK + 1;
-localparam STRM2_NBLOCKS = STRM2_END_BLOCK - STRM2_BLOCK + 1;
-localparam STRM3_NBLOCKS = STRM3_END_BLOCK - STRM3_BLOCK + 1;
-localparam STRM4_NBLOCKS = STRM4_END_BLOCK - STRM4_BLOCK + 1;
-localparam STRM5_NBLOCKS = STRM5_END_BLOCK - STRM5_BLOCK + 1;
-
-//TODO: check that memory width is equal to the widest stream
-//TODO: check that the stream depths and offsets make sense, and that the memory depth is sufficient (or calculate depth here?)
-initial begin
-    if((NSTREAMS < 1) | (NSTREAMS > 6)) begin
-        $display("Invalid setting for NSTREAMS, please set in range [1,6]");
-        $finish();
-    end
-end
-
-//invert reset
-wire rst;
-assign rst = ~aresetn;
-
-//WARNING: pipeline depth is larger than the number of streams per port so we have in-flight writes that may see not-ready when they get executed
-//solution: use prog-full to make sure we have an equal number of free slots in the stream to the read pipeline depth
-
-reg [$clog2(MEM_DEPTH)-1:0] strm0_addr = STRM0_OFFSET;
-reg [$clog2(MEM_DEPTH)-1:0] strm1_addr = STRM1_OFFSET;
-reg [$clog2(MEM_DEPTH)-1:0] strm2_addr = STRM2_OFFSET;
-reg [$clog2(MEM_DEPTH)-1:0] strm3_addr = STRM3_OFFSET;
-reg [$clog2(MEM_DEPTH)-1:0] strm4_addr = STRM4_OFFSET;
-reg [$clog2(MEM_DEPTH)-1:0] strm5_addr = STRM5_OFFSET;
-
-reg strm0_incr_en;
-reg strm1_incr_en;
-reg strm2_incr_en;
-reg strm3_incr_en;
-reg strm4_incr_en;
-reg strm5_incr_en;
-
-wire strm0_rst;
-wire strm1_rst;
-wire strm2_rst;
-wire strm3_rst;
-wire strm4_rst;
-wire strm5_rst;
-
-reg strm0_ready;
-reg strm1_ready;
-reg strm2_ready;
-reg strm3_ready;
-reg strm4_ready;
-reg strm5_ready;
-
-//arbiter: work on one stream at a time
-//multiplex each port between (up to) half of the streams
-reg [1:0] current_stream_porta = 0;
-reg [1:0] current_stream_portb = 0;
-
-always @(posedge aclk) begin
-    if(rst)
-        current_stream_porta <= 0;
-    else case(current_stream_porta)
-        0: current_stream_porta <= strm2_ready ? 1 : strm4_ready ? 2 : 0;
-        1: current_stream_porta <= strm4_ready ? 2 : strm0_ready ? 0 : 1;
-        2: current_stream_porta <= strm0_ready ? 0 : strm2_ready ? 1 : 2;
-    endcase
-    if(rst)
-        current_stream_portb <= 0;
-    else case(current_stream_portb)
-        0: current_stream_portb <= strm3_ready ? 1 : strm5_ready ? 2 : 0;
-        1: current_stream_portb <= strm5_ready ? 2 : strm1_ready ? 0 : 1;
-        2: current_stream_portb <= strm1_ready ? 0 : strm3_ready ? 1 : 2;
-    endcase
-end
+generate
+if(NSTREAMS <= 2) begin: singleblock
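+//up to two streams can each get their own RAM port, so use the lightweight single-block streamer; more streams need the arbitrated multi-block implementation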
 
-always @(posedge aclk) begin
-    if(rst) begin
-        strm0_incr_en <= 0;
-        strm1_incr_en <= 0;
-        strm2_incr_en <= 0;
-        strm3_incr_en <= 0;
-        strm4_incr_en <= 0;
-        strm5_incr_en <= 0;
-    end else begin
-        strm0_incr_en <= (current_stream_porta == 0) & strm0_ready;
-        strm1_incr_en <= (current_stream_portb == 0) & strm1_ready;
-        strm2_incr_en <= (current_stream_porta == 1) & strm2_ready;
-        strm3_incr_en <= (current_stream_portb == 1) & strm3_ready;
-        strm4_incr_en <= (current_stream_porta == 2) & strm4_ready;
-        strm5_incr_en <= (current_stream_portb == 2) & strm5_ready;
-    end
-end
-
-assign strm0_rst = strm0_incr_en & (strm0_addr == (STRM0_OFFSET + STRM0_DEPTH-1));
-assign strm1_rst = strm1_incr_en & (strm1_addr == (STRM1_OFFSET + STRM1_DEPTH-1));
-assign strm2_rst = strm2_incr_en & (strm2_addr == (STRM2_OFFSET + STRM2_DEPTH-1));
-assign strm3_rst = strm3_incr_en & (strm3_addr == (STRM3_OFFSET + STRM3_DEPTH-1));
-assign strm4_rst = strm4_incr_en & (strm4_addr == (STRM4_OFFSET + STRM4_DEPTH-1));
-assign strm5_rst = strm5_incr_en & (strm5_addr == (STRM5_OFFSET + STRM5_DEPTH-1));
-
-always @(posedge aclk) begin
-    strm0_ready <= ~m_axis_0_afull;
-    strm1_ready <= ~m_axis_1_afull & (NSTREAMS >= 2);
-    strm2_ready <= ~m_axis_2_afull & (NSTREAMS >= 3);
-    strm3_ready <= ~m_axis_3_afull & (NSTREAMS >= 4);
-    strm4_ready <= ~m_axis_4_afull & (NSTREAMS >= 5);
-    strm5_ready <= ~m_axis_5_afull & (NSTREAMS >= 6);
-end
 
-//one address counter per stream; more LUTs but keeps routing short and local
-always @(posedge aclk) begin
-    if(strm0_rst | rst)
-        strm0_addr <= STRM0_OFFSET;
-    else if(strm0_incr_en)
-        strm0_addr <= strm0_addr + 1;
-    if(strm1_rst | rst)
-        strm1_addr <= STRM1_OFFSET;
-    else if(strm1_incr_en)
-        strm1_addr <= strm1_addr + 1;
-    if(strm2_rst | rst)
-        strm2_addr <= STRM2_OFFSET;
-    else if(strm2_incr_en)
-        strm2_addr <= strm2_addr + 1;
-    if(strm3_rst | rst)
-        strm3_addr <= STRM3_OFFSET;
-    else if(strm3_incr_en)
-        strm3_addr <= strm3_addr + 1;
-    if(strm4_rst | rst)
-        strm4_addr <= STRM4_OFFSET;
-    else if(strm4_incr_en)
-        strm4_addr <= strm4_addr + 1;
-    if(strm5_rst | rst)
-        strm5_addr <= STRM5_OFFSET;
-    else if(strm5_incr_en)
-        strm5_addr <= strm5_addr + 1;
-end
-
-reg [$clog2(MEM_DEPTH)-1:0] addra;
-wire [MEM_WIDTH*NMEMBLOCKS-1:0] rdqa;
-
-reg [$clog2(MEM_DEPTH)-1:0] addrb;
-wire [MEM_WIDTH*NMEMBLOCKS-1:0] rdqb;
-
-wire [NMEMBLOCKS-1:0] we;
-
-reg [1:0] addr_select_porta;
-reg [1:0] addr_select_portb;
-
-//multiplex addresses of various streams into address ports of memory
-always @(posedge aclk) begin
-    addr_select_porta <= current_stream_porta;
-    case(addr_select_porta)
-        0: addra <= strm0_addr;
-        1: addra <= strm2_addr;
-        2: addra <= strm4_addr;
-    endcase
-    addr_select_portb <= current_stream_portb;
-    case(addr_select_portb)
-        0: addrb <= strm1_addr;
-        1: addrb <= strm3_addr;
-        2: addrb <= strm5_addr;
-    endcase
-end
+memstream_singleblock
+#(
+    .CONFIG_EN(CONFIG_EN),
+    .NSTREAMS(NSTREAMS),
+    .MEM_DEPTH(MEM_DEPTH),
+    .MEM_WIDTH(MEM_WIDTH),
+    .MEM_INIT(MEM_INIT),
+    .RAM_STYLE(RAM_STYLE),
 
-genvar g;
-generate for(g=0; g<NMEMBLOCKS; g=g+1) begin: blockports
+    //widths per stream
+    .STRM0_WIDTH(STRM0_WIDTH),
+    .STRM1_WIDTH(STRM1_WIDTH),
 
-assign we[g] = (CONFIG_EN == 1) & config_ce & config_we & (config_address[31:BLOCKADRWIDTH] == g);
+    //depths per stream
+    .STRM0_DEPTH(STRM0_DEPTH),
+    .STRM1_DEPTH(STRM1_DEPTH),
 
-ramb18_wf_dualport
-#(
-    .ID(g),
-	.DWIDTH(MEM_WIDTH),
-	.AWIDTH(BLOCKADRWIDTH),
-	.MEM_INIT(MEM_INIT),
-  .RAM_STYLE(RAM_STYLE)
+    //offsets for each stream
+    .STRM0_OFFSET(STRM0_OFFSET),
+    .STRM1_OFFSET(STRM1_OFFSET)
 )
-ram
+mem
 (
-	.clk(aclk),
-
-	.wea(we[g]),
-	.addra(we[g] ? config_address[BLOCKADRWIDTH-1:0] : addra[BLOCKADRWIDTH-1:0]),
-	.wdataa(config_d0),
-	.rdqa(rdqa[(g+1)*MEM_WIDTH-1:g*MEM_WIDTH]),
-
-	.web(1'b0),
-	.addrb(addrb[BLOCKADRWIDTH-1:0]),
-	.wdatab('d0),
-	.rdqb(rdqb[(g+1)*MEM_WIDTH-1:g*MEM_WIDTH])
+    .aclk(aclk),
+    .aresetn(aresetn),
+
+    .config_address(config_address),
+    .config_ce(config_ce),
+    .config_we(config_we),
+    .config_d0(config_d0),
+    .config_q0(config_q0),
+
+    .m_axis_0_tready(m_axis_0_tready),
+    .m_axis_0_tvalid(m_axis_0_tvalid),
+    .m_axis_0_tdata(m_axis_0_tdata),
+
+    .m_axis_1_tready(m_axis_1_tready),
+    .m_axis_1_tvalid(m_axis_1_tvalid),
+    .m_axis_1_tdata(m_axis_1_tdata)
 );
 
-end
-endgenerate
-
-integer i;
-
-generate if(NMEMBLOCKS > 1) begin: multiblock
-
-wire [MEM_WIDTH-1:0] rdqmux[5:0];
-
-reg [$clog2(MEM_DEPTH)-BLOCKADRWIDTH-1:0] rdblocka[2:0];
-reg [$clog2(MEM_DEPTH)-BLOCKADRWIDTH-1:0] rdblockb[2:0];
-
-always @(posedge aclk) begin
-    rdblocka[0] <= addra[$clog2(MEM_DEPTH)-1:BLOCKADRWIDTH];
-    rdblockb[0] <= addrb[$clog2(MEM_DEPTH)-1:BLOCKADRWIDTH];
-    for(i=0; i<2; i=i+1) begin
-		rdblocka[i+1] <= rdblocka[i];
-		rdblockb[i+1] <= rdblockb[i];
-    end
-end
-
-if(NSTREAMS >= 1) begin: en_strm0
-	if(STRM0_MUX == 1) begin: mux0
-		mux #(STRM0_NBLOCKS, MEM_WIDTH) m(rdqa[(STRM0_BLOCK+STRM0_NBLOCKS)*MEM_WIDTH-1:STRM0_BLOCK*MEM_WIDTH],rdqmux[0],rdblocka[1] - STRM0_BLOCK);
-	end else begin: nomux0
-		assign rdqmux[0] = rdqa[(STRM0_BLOCK+1)*MEM_WIDTH-1:STRM0_BLOCK*MEM_WIDTH];
-	end
-	assign m_axis_0_tdata = rdqmux[0][STRM0_WIDTH-1:0];
-end
-
-if(NSTREAMS >= 2) begin: en_strm1
-	if(STRM1_MUX == 1) begin: mux1
-		mux #(STRM1_NBLOCKS, MEM_WIDTH) m(rdqb[(STRM1_BLOCK+STRM1_NBLOCKS)*MEM_WIDTH-1:STRM1_BLOCK*MEM_WIDTH],rdqmux[1],rdblockb[1] - STRM1_BLOCK);
-	end else begin: nomux1
-		assign rdqmux[1] = rdqb[(STRM1_BLOCK+1)*MEM_WIDTH-1:STRM1_BLOCK*MEM_WIDTH];
-	end
-	assign m_axis_1_tdata = rdqmux[1][STRM1_WIDTH-1:0];
-end
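+//streams 2-5 are unused when NSTREAMS <= 2, so tie their outputs off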
+assign m_axis_2_tvalid = 0;
+assign m_axis_2_tdata = 0;
+assign m_axis_3_tvalid = 0;
+assign m_axis_3_tdata = 0;
+assign m_axis_4_tvalid = 0;
+assign m_axis_4_tdata = 0;
+assign m_axis_5_tvalid = 0;
+assign m_axis_5_tdata = 0;
 
-if(NSTREAMS >= 3) begin: en_strm2
-	if(STRM2_MUX == 1) begin: mux2
-		mux #(STRM2_NBLOCKS, MEM_WIDTH) m(rdqa[(STRM2_BLOCK+STRM2_NBLOCKS)*MEM_WIDTH-1:STRM2_BLOCK*MEM_WIDTH],rdqmux[2],rdblocka[1] - STRM2_BLOCK);
-	end else begin: nomux2
-		assign rdqmux[2] = rdqa[(STRM2_BLOCK+1)*MEM_WIDTH-1:STRM2_BLOCK*MEM_WIDTH];
-	end
-	assign m_axis_2_tdata = rdqmux[2][STRM2_WIDTH-1:0];
-end
+end else begin: multiblock
 
-if(NSTREAMS >= 4) begin: en_strm3
-	if(STRM3_MUX == 1) begin: mux3
-		mux #(STRM3_NBLOCKS, MEM_WIDTH) m(rdqb[(STRM3_BLOCK+STRM3_NBLOCKS)*MEM_WIDTH-1:STRM3_BLOCK*MEM_WIDTH],rdqmux[3],rdblockb[1] - STRM3_BLOCK);
-	end else begin: nomux3
-		assign rdqmux[3] = rdqb[(STRM3_BLOCK+1)*MEM_WIDTH-1:STRM3_BLOCK*MEM_WIDTH];
-	end
-	assign m_axis_3_tdata = rdqmux[3][STRM3_WIDTH-1:0];
-end
 
-if(NSTREAMS >= 5) begin: en_strm4
-	if(STRM4_MUX == 1) begin: mux4
-		mux #(STRM4_NBLOCKS, MEM_WIDTH) m(rdqa[(STRM4_BLOCK+STRM4_NBLOCKS)*MEM_WIDTH-1:STRM4_BLOCK*MEM_WIDTH],rdqmux[4],rdblocka[1] - STRM4_BLOCK);
-	end else begin: nomux4
-		assign rdqmux[4] = rdqa[(STRM4_BLOCK+1)*MEM_WIDTH-1:STRM4_BLOCK*MEM_WIDTH];
-	end
-	assign m_axis_4_tdata = rdqmux[4][STRM4_WIDTH-1:0];
-end
+memstream_multiblock
+#(
+    .CONFIG_EN(CONFIG_EN),
+    .NSTREAMS(NSTREAMS),
+    .MEM_DEPTH(MEM_DEPTH),
+    .MEM_WIDTH(MEM_WIDTH),
+    .MEM_INIT(MEM_INIT),
+    .RAM_STYLE(RAM_STYLE),
 
-if(NSTREAMS >= 6) begin: en_strm5
-	if(STRM5_MUX == 1) begin: mux5
-		mux #(STRM5_NBLOCKS, MEM_WIDTH) m(rdqb[(STRM5_BLOCK+STRM5_NBLOCKS)*MEM_WIDTH-1:STRM5_BLOCK*MEM_WIDTH],rdqmux[5],rdblockb[1] - STRM5_BLOCK);
-	end else begin: nomux5
-		assign rdqmux[5] = rdqb[(STRM5_BLOCK+1)*MEM_WIDTH-1:STRM5_BLOCK*MEM_WIDTH];
-	end
-	assign m_axis_5_tdata = rdqmux[5][STRM5_WIDTH-1:0];
-end
+    //widths per stream
+    .STRM0_WIDTH(STRM0_WIDTH),
+    .STRM1_WIDTH(STRM1_WIDTH),
+    .STRM2_WIDTH(STRM2_WIDTH),
+    .STRM3_WIDTH(STRM3_WIDTH),
+    .STRM4_WIDTH(STRM4_WIDTH),
+    .STRM5_WIDTH(STRM5_WIDTH),
+
+    //depths per stream
+    .STRM0_DEPTH(STRM0_DEPTH),
+    .STRM1_DEPTH(STRM1_DEPTH),
+    .STRM2_DEPTH(STRM2_DEPTH),
+    .STRM3_DEPTH(STRM3_DEPTH),
+    .STRM4_DEPTH(STRM4_DEPTH),
+    .STRM5_DEPTH(STRM5_DEPTH),
+
+    //offsets for each stream
+    .STRM0_OFFSET(STRM0_OFFSET),
+    .STRM1_OFFSET(STRM1_OFFSET),
+    .STRM2_OFFSET(STRM2_OFFSET),
+    .STRM3_OFFSET(STRM3_OFFSET),
+    .STRM4_OFFSET(STRM4_OFFSET),
+    .STRM5_OFFSET(STRM5_OFFSET)
+)
+mem
+(
+    .aclk(aclk),
+    .aresetn(aresetn),
+
+    .config_address(config_address),
+    .config_ce(config_ce),
+    .config_we(config_we),
+    .config_d0(config_d0),
+    .config_q0(config_q0),
+
+    .m_axis_0_afull(m_axis_0_afull),
+    .m_axis_0_tready(m_axis_0_tready),
+    .m_axis_0_tvalid(m_axis_0_tvalid),
+    .m_axis_0_tdata(m_axis_0_tdata),
+
+    .m_axis_1_afull(m_axis_1_afull),
+    .m_axis_1_tready(m_axis_1_tready),
+    .m_axis_1_tvalid(m_axis_1_tvalid),
+    .m_axis_1_tdata(m_axis_1_tdata),
+
+    .m_axis_2_afull(m_axis_2_afull),
+    .m_axis_2_tready(m_axis_2_tready),
+    .m_axis_2_tvalid(m_axis_2_tvalid),
+    .m_axis_2_tdata(m_axis_2_tdata),
+
+    .m_axis_3_afull(m_axis_3_afull),
+    .m_axis_3_tready(m_axis_3_tready),
+    .m_axis_3_tvalid(m_axis_3_tvalid),
+    .m_axis_3_tdata(m_axis_3_tdata),
+
+    .m_axis_4_afull(m_axis_4_afull),
+    .m_axis_4_tready(m_axis_4_tready),
+    .m_axis_4_tvalid(m_axis_4_tvalid),
+    .m_axis_4_tdata(m_axis_4_tdata),
+
+    .m_axis_5_afull(m_axis_5_afull),
+    .m_axis_5_tready(m_axis_5_tready),
+    .m_axis_5_tvalid(m_axis_5_tvalid),
+    .m_axis_5_tdata(m_axis_5_tdata)
 
-end else begin: singleblock
+);
 
-if(NSTREAMS >= 1) begin: en_strm0_direct
-    assign m_axis_0_tdata = rdqa[STRM0_WIDTH-1:0];
-end
-if(NSTREAMS >= 2) begin: en_strm1_direct
-	assign m_axis_1_tdata = rdqb[STRM1_WIDTH-1:0];
-end
-if(NSTREAMS >= 3) begin: en_strm2_direct
-	assign m_axis_2_tdata = rdqa[STRM2_WIDTH-1:0];
-end
-if(NSTREAMS >= 4) begin: en_strm3_direct
-	assign m_axis_3_tdata = rdqb[STRM3_WIDTH-1:0];
-end
-if(NSTREAMS >= 5) begin: en_strm4_direct
-	assign m_axis_4_tdata = rdqa[STRM4_WIDTH-1:0];
-end
-if(NSTREAMS >= 6) begin: en_strm5_direct
-	assign m_axis_5_tdata = rdqb[STRM5_WIDTH-1:0];
-end
 
 end
 endgenerate
 
-//output to AXI Streams
-reg tvalid_pipe0[2:0];
-reg tvalid_pipe1[2:0];
-reg tvalid_pipe2[2:0];
-reg tvalid_pipe3[2:0];
-reg tvalid_pipe4[2:0];
-reg tvalid_pipe5[2:0];
-
-assign m_axis_0_tvalid = tvalid_pipe0[2];
-assign m_axis_1_tvalid = tvalid_pipe1[2];
-assign m_axis_2_tvalid = tvalid_pipe2[2];
-assign m_axis_3_tvalid = tvalid_pipe3[2];
-assign m_axis_4_tvalid = tvalid_pipe4[2];
-assign m_axis_5_tvalid = tvalid_pipe5[2];
-
-
-always @(posedge aclk) begin
-    tvalid_pipe0[0] <= strm0_incr_en;
-    tvalid_pipe1[0] <= strm1_incr_en;
-    tvalid_pipe2[0] <= strm2_incr_en;
-    tvalid_pipe3[0] <= strm3_incr_en;
-    tvalid_pipe4[0] <= strm4_incr_en;
-    tvalid_pipe5[0] <= strm5_incr_en;
-    for(i=0; i<2; i=i+1) begin: srl
-        tvalid_pipe0[i+1] <= tvalid_pipe0[i];
-        tvalid_pipe1[i+1] <= tvalid_pipe1[i];
-        tvalid_pipe2[i+1] <= tvalid_pipe2[i];
-        tvalid_pipe3[i+1] <= tvalid_pipe3[i];
-        tvalid_pipe4[i+1] <= tvalid_pipe4[i];
-        tvalid_pipe5[i+1] <= tvalid_pipe5[i];
-    end
-end
-
-assign config_q0 = 0;
-
 endmodule
diff --git a/finn-rtllib/memstream/hdl/memstream_multiblock.v b/finn-rtllib/memstream/hdl/memstream_multiblock.v
new file mode 100644
index 0000000000000000000000000000000000000000..017088b8c1572bb3baa2a5a46336509187a762ab
--- /dev/null
+++ b/finn-rtllib/memstream/hdl/memstream_multiblock.v
@@ -0,0 +1,471 @@
+/*
+ Copyright (c) 2020, Xilinx
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+ * Neither the name of FINN nor the names of its
+   contributors may be used to endorse or promote products derived from
+   this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+module memstream_multiblock
+#(
+//parameters: enable/disable the axi-mm style config interface, number of streams, $readmemh init path, and per-stream widths, depths and offsets
+    parameter CONFIG_EN = 1,
+    parameter NSTREAMS = 6,//1 up to 6
+
+    parameter MEM_DEPTH = 13824,
+    parameter MEM_WIDTH = 32,
+    parameter MEM_INIT = "./",
+    parameter RAM_STYLE = "auto",
+
+    //widths per stream
+	parameter STRM0_WIDTH = 32,
+	parameter STRM1_WIDTH = 32,
+	parameter STRM2_WIDTH = 32,
+	parameter STRM3_WIDTH = 32,
+	parameter STRM4_WIDTH = 32,
+	parameter STRM5_WIDTH = 32,
+
+	//depths per stream
+	parameter STRM0_DEPTH = 2304,
+	parameter STRM1_DEPTH = 2304,
+	parameter STRM2_DEPTH = 2304,
+	parameter STRM3_DEPTH = 2304,
+	parameter STRM4_DEPTH = 2304,
+	parameter STRM5_DEPTH = 2304,
+
+	//offsets for each stream
+	parameter STRM0_OFFSET = 0,
+	parameter STRM1_OFFSET = 2304,
+	parameter STRM2_OFFSET = 4608,
+	parameter STRM3_OFFSET = 6912,
+	parameter STRM4_OFFSET = 9216,
+	parameter STRM5_OFFSET = 11520
+)
+
+(
+    input aclk,
+    input aresetn,
+
+    //optional configuration interface compatible with ap_memory
+	input [31:0] config_address,
+	input config_ce,
+	input config_we,
+	input [31:0] config_d0,
+	output [31:0] config_q0,
+
+    //multiple output AXI Streams, TDATA width rounded to multiple of 8 bits
+    input m_axis_0_afull,
+    input m_axis_0_tready,
+    output m_axis_0_tvalid,
+    output [((STRM0_WIDTH+7)/8)*8-1:0] m_axis_0_tdata,
+
+    input m_axis_1_afull,
+    input m_axis_1_tready,
+    output m_axis_1_tvalid,
+    output [((STRM1_WIDTH+7)/8)*8-1:0] m_axis_1_tdata,
+
+    input m_axis_2_afull,
+    input m_axis_2_tready,
+    output m_axis_2_tvalid,
+    output [((STRM2_WIDTH+7)/8)*8-1:0] m_axis_2_tdata,
+
+    input m_axis_3_afull,
+    input m_axis_3_tready,
+    output m_axis_3_tvalid,
+    output [((STRM3_WIDTH+7)/8)*8-1:0] m_axis_3_tdata,
+
+    input m_axis_4_afull,
+    input m_axis_4_tready,
+    output m_axis_4_tvalid,
+    output [((STRM4_WIDTH+7)/8)*8-1:0] m_axis_4_tdata,
+
+    input m_axis_5_afull,
+    input m_axis_5_tready,
+    output m_axis_5_tvalid,
+    output [((STRM5_WIDTH+7)/8)*8-1:0] m_axis_5_tdata
+
+
+);
+
+//calculate number of RAMB18 blocks we need depth-wise
+localparam NMEMBLOCKS = (MEM_DEPTH+1023) / 1024; //ceil(MEM_DEPTH/1024)
+
+//calculate width of address for each block
+localparam BLOCKADRWIDTH = NMEMBLOCKS > 1 ? 10 : $clog2(MEM_DEPTH);
+
+//determine whether a stream needs to multiplex between memory blocks
+localparam STRM0_MUX = ((STRM0_OFFSET/1024) != ((STRM0_OFFSET+STRM0_DEPTH)/1024));
+localparam STRM1_MUX = ((STRM1_OFFSET/1024) != ((STRM1_OFFSET+STRM1_DEPTH)/1024));
+localparam STRM2_MUX = ((STRM2_OFFSET/1024) != ((STRM2_OFFSET+STRM2_DEPTH)/1024));
+localparam STRM3_MUX = ((STRM3_OFFSET/1024) != ((STRM3_OFFSET+STRM3_DEPTH)/1024));
+localparam STRM4_MUX = ((STRM4_OFFSET/1024) != ((STRM4_OFFSET+STRM4_DEPTH)/1024));
+localparam STRM5_MUX = ((STRM5_OFFSET/1024) != ((STRM5_OFFSET+STRM5_DEPTH)/1024));
+
+//determine what the base block of each stream is
+localparam STRM0_BLOCK = (STRM0_OFFSET/1024);
+localparam STRM1_BLOCK = (STRM1_OFFSET/1024);
+localparam STRM2_BLOCK = (STRM2_OFFSET/1024);
+localparam STRM3_BLOCK = (STRM3_OFFSET/1024);
+localparam STRM4_BLOCK = (STRM4_OFFSET/1024);
+localparam STRM5_BLOCK = (STRM5_OFFSET/1024);
+
+//determine what the end block of each stream is
+localparam STRM0_END_BLOCK = ((STRM0_OFFSET+STRM0_DEPTH-1)/1024);
+localparam STRM1_END_BLOCK = ((STRM1_OFFSET+STRM1_DEPTH-1)/1024);
+localparam STRM2_END_BLOCK = ((STRM2_OFFSET+STRM2_DEPTH-1)/1024);
+localparam STRM3_END_BLOCK = ((STRM3_OFFSET+STRM3_DEPTH-1)/1024);
+localparam STRM4_END_BLOCK = ((STRM4_OFFSET+STRM4_DEPTH-1)/1024);
+localparam STRM5_END_BLOCK = ((STRM5_OFFSET+STRM5_DEPTH-1)/1024);
+
+//determine the number of blocks spanned by each stream
+localparam STRM0_NBLOCKS = STRM0_END_BLOCK - STRM0_BLOCK + 1;
+localparam STRM1_NBLOCKS = STRM1_END_BLOCK - STRM1_BLOCK + 1;
+localparam STRM2_NBLOCKS = STRM2_END_BLOCK - STRM2_BLOCK + 1;
+localparam STRM3_NBLOCKS = STRM3_END_BLOCK - STRM3_BLOCK + 1;
+localparam STRM4_NBLOCKS = STRM4_END_BLOCK - STRM4_BLOCK + 1;
+localparam STRM5_NBLOCKS = STRM5_END_BLOCK - STRM5_BLOCK + 1;
+
+//TODO: check that memory width is equal to the widest stream
+//TODO: check that the stream depths and offsets make sense, and that the memory depth is sufficient (or calculate depth here?)
+initial begin
+    if((NSTREAMS < 1) | (NSTREAMS > 6)) begin
+        $display("Invalid setting for NSTREAMS, please set in range [1,6]");
+        $finish();
+    end
+end
+
+//invert reset
+wire rst;
+assign rst = ~aresetn;
+
+//WARNING: the read pipeline is deeper than the number of streams per port, so in-flight reads may find their target stream no longer ready by the time they complete
+//solution: use prog-full (afull) so the stream FIFO always has at least as many free slots as the read pipeline depth
+
+reg [$clog2(MEM_DEPTH)-1:0] strm0_addr = STRM0_OFFSET;
+reg [$clog2(MEM_DEPTH)-1:0] strm1_addr = STRM1_OFFSET;
+reg [$clog2(MEM_DEPTH)-1:0] strm2_addr = STRM2_OFFSET;
+reg [$clog2(MEM_DEPTH)-1:0] strm3_addr = STRM3_OFFSET;
+reg [$clog2(MEM_DEPTH)-1:0] strm4_addr = STRM4_OFFSET;
+reg [$clog2(MEM_DEPTH)-1:0] strm5_addr = STRM5_OFFSET;
+
+reg strm0_incr_en;
+reg strm1_incr_en;
+reg strm2_incr_en;
+reg strm3_incr_en;
+reg strm4_incr_en;
+reg strm5_incr_en;
+
+wire strm0_rst;
+wire strm1_rst;
+wire strm2_rst;
+wire strm3_rst;
+wire strm4_rst;
+wire strm5_rst;
+
+reg strm0_ready;
+reg strm1_ready;
+reg strm2_ready;
+reg strm3_ready;
+reg strm4_ready;
+reg strm5_ready;
+
+//arbiter: work on one stream at a time
+//multiplex each port between (up to) half of the streams
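+//port A serves streams 0, 2 and 4; port B serves streams 1, 3 and 5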
+reg [1:0] current_stream_porta = 0;
+reg [1:0] current_stream_portb = 0;
+
+always @(posedge aclk) begin
+    if(rst)
+        current_stream_porta <= 0;
+    else case(current_stream_porta)
+        0: current_stream_porta <= strm2_ready ? 1 : strm4_ready ? 2 : 0;
+        1: current_stream_porta <= strm4_ready ? 2 : strm0_ready ? 0 : 1;
+        2: current_stream_porta <= strm0_ready ? 0 : strm2_ready ? 1 : 2;
+    endcase
+    if(rst)
+        current_stream_portb <= 0;
+    else case(current_stream_portb)
+        0: current_stream_portb <= strm3_ready ? 1 : strm5_ready ? 2 : 0;
+        1: current_stream_portb <= strm5_ready ? 2 : strm1_ready ? 0 : 1;
+        2: current_stream_portb <= strm1_ready ? 0 : strm3_ready ? 1 : 2;
+    endcase
+end
+
+always @(posedge aclk) begin
+    if(rst) begin
+        strm0_incr_en <= 0;
+        strm1_incr_en <= 0;
+        strm2_incr_en <= 0;
+        strm3_incr_en <= 0;
+        strm4_incr_en <= 0;
+        strm5_incr_en <= 0;
+    end else begin
+        strm0_incr_en <= (current_stream_porta == 0) & strm0_ready;
+        strm1_incr_en <= (current_stream_portb == 0) & strm1_ready;
+        strm2_incr_en <= (current_stream_porta == 1) & strm2_ready;
+        strm3_incr_en <= (current_stream_portb == 1) & strm3_ready;
+        strm4_incr_en <= (current_stream_porta == 2) & strm4_ready;
+        strm5_incr_en <= (current_stream_portb == 2) & strm5_ready;
+    end
+end
+
+assign strm0_rst = strm0_incr_en & (strm0_addr == (STRM0_OFFSET + STRM0_DEPTH-1));
+assign strm1_rst = strm1_incr_en & (strm1_addr == (STRM1_OFFSET + STRM1_DEPTH-1));
+assign strm2_rst = strm2_incr_en & (strm2_addr == (STRM2_OFFSET + STRM2_DEPTH-1));
+assign strm3_rst = strm3_incr_en & (strm3_addr == (STRM3_OFFSET + STRM3_DEPTH-1));
+assign strm4_rst = strm4_incr_en & (strm4_addr == (STRM4_OFFSET + STRM4_DEPTH-1));
+assign strm5_rst = strm5_incr_en & (strm5_addr == (STRM5_OFFSET + STRM5_DEPTH-1));
+
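+//a stream only competes for a port while its output FIFO is not almost full, leaving headroom for reads already in the pipeline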
+always @(posedge aclk) begin
+    strm0_ready <= ~m_axis_0_afull;
+    strm1_ready <= ~m_axis_1_afull & (NSTREAMS >= 2);
+    strm2_ready <= ~m_axis_2_afull & (NSTREAMS >= 3);
+    strm3_ready <= ~m_axis_3_afull & (NSTREAMS >= 4);
+    strm4_ready <= ~m_axis_4_afull & (NSTREAMS >= 5);
+    strm5_ready <= ~m_axis_5_afull & (NSTREAMS >= 6);
+end
+
+//one address counter per stream; more LUTs but keeps routing short and local
+always @(posedge aclk) begin
+    if(strm0_rst | rst)
+        strm0_addr <= STRM0_OFFSET;
+    else if(strm0_incr_en)
+        strm0_addr <= strm0_addr + 1;
+    if(strm1_rst | rst)
+        strm1_addr <= STRM1_OFFSET;
+    else if(strm1_incr_en)
+        strm1_addr <= strm1_addr + 1;
+    if(strm2_rst | rst)
+        strm2_addr <= STRM2_OFFSET;
+    else if(strm2_incr_en)
+        strm2_addr <= strm2_addr + 1;
+    if(strm3_rst | rst)
+        strm3_addr <= STRM3_OFFSET;
+    else if(strm3_incr_en)
+        strm3_addr <= strm3_addr + 1;
+    if(strm4_rst | rst)
+        strm4_addr <= STRM4_OFFSET;
+    else if(strm4_incr_en)
+        strm4_addr <= strm4_addr + 1;
+    if(strm5_rst | rst)
+        strm5_addr <= STRM5_OFFSET;
+    else if(strm5_incr_en)
+        strm5_addr <= strm5_addr + 1;
+end
+
+reg [$clog2(MEM_DEPTH)-1:0] addra;
+wire [MEM_WIDTH*NMEMBLOCKS-1:0] rdqa;
+
+reg [$clog2(MEM_DEPTH)-1:0] addrb;
+wire [MEM_WIDTH*NMEMBLOCKS-1:0] rdqb;
+
+wire [NMEMBLOCKS-1:0] we;
+
+reg [1:0] addr_select_porta;
+reg [1:0] addr_select_portb;
+
+//multiplex addresses of various streams into address ports of memory
+always @(posedge aclk) begin
+    addr_select_porta <= current_stream_porta;
+    case(addr_select_porta)
+        0: addra <= strm0_addr;
+        1: addra <= strm2_addr;
+        2: addra <= strm4_addr;
+    endcase
+    addr_select_portb <= current_stream_portb;
+    case(addr_select_portb)
+        0: addrb <= strm1_addr;
+        1: addrb <= strm3_addr;
+        2: addrb <= strm5_addr;
+    endcase
+end
+
+genvar g;
+generate for(g=0; g<NMEMBLOCKS; g=g+1) begin: blockports
+
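+//config-interface writes are steered to RAM block g by the upper bits of config_address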
+assign we[g] = (CONFIG_EN == 1) & config_ce & config_we & (config_address[31:BLOCKADRWIDTH] == g);
+
+ramb18_wf_dualport
+#(
+    .ID(g),
+	.DWIDTH(MEM_WIDTH),
+	.AWIDTH(BLOCKADRWIDTH),
+	.MEM_INIT(MEM_INIT),
+  .RAM_STYLE(RAM_STYLE)
+)
+ram
+(
+	.clk(aclk),
+
+	.wea(we[g]),
+    .ena(1'b1),
+    .enqa(1'b1),
+	.addra(we[g] ? config_address[BLOCKADRWIDTH-1:0] : addra[BLOCKADRWIDTH-1:0]),
+	.wdataa(config_d0),
+	.rdqa(rdqa[(g+1)*MEM_WIDTH-1:g*MEM_WIDTH]),
+
+	.web(1'b0),
+    .enb(1'b1),
+    .enqb(1'b1),
+	.addrb(addrb[BLOCKADRWIDTH-1:0]),
+	.wdatab('d0),
+	.rdqb(rdqb[(g+1)*MEM_WIDTH-1:g*MEM_WIDTH])
+);
+
+end
+endgenerate
+
+integer i;
+
+generate if(NMEMBLOCKS > 1) begin: multiblock
+
+wire [MEM_WIDTH-1:0] rdqmux[5:0];
+
+reg [$clog2(MEM_DEPTH)-BLOCKADRWIDTH-1:0] rdblocka[2:0];
+reg [$clog2(MEM_DEPTH)-BLOCKADRWIDTH-1:0] rdblockb[2:0];
+
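+//pipeline the block index so it stays aligned with the registered RAM read data used by the muxes below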
+always @(posedge aclk) begin
+    rdblocka[0] <= addra[$clog2(MEM_DEPTH)-1:BLOCKADRWIDTH];
+    rdblockb[0] <= addrb[$clog2(MEM_DEPTH)-1:BLOCKADRWIDTH];
+    for(i=0; i<2; i=i+1) begin
+		rdblocka[i+1] <= rdblocka[i];
+		rdblockb[i+1] <= rdblockb[i];
+    end
+end
+
+if(NSTREAMS >= 1) begin: en_strm0
+	if(STRM0_MUX == 1) begin: mux0
+		mux #(STRM0_NBLOCKS, MEM_WIDTH) m(rdqa[(STRM0_BLOCK+STRM0_NBLOCKS)*MEM_WIDTH-1:STRM0_BLOCK*MEM_WIDTH],rdqmux[0],rdblocka[1] - STRM0_BLOCK);
+	end else begin: nomux0
+		assign rdqmux[0] = rdqa[(STRM0_BLOCK+1)*MEM_WIDTH-1:STRM0_BLOCK*MEM_WIDTH];
+	end
+	assign m_axis_0_tdata = rdqmux[0][STRM0_WIDTH-1:0];
+end
+
+if(NSTREAMS >= 2) begin: en_strm1
+	if(STRM1_MUX == 1) begin: mux1
+		mux #(STRM1_NBLOCKS, MEM_WIDTH) m(rdqb[(STRM1_BLOCK+STRM1_NBLOCKS)*MEM_WIDTH-1:STRM1_BLOCK*MEM_WIDTH],rdqmux[1],rdblockb[1] - STRM1_BLOCK);
+	end else begin: nomux1
+		assign rdqmux[1] = rdqb[(STRM1_BLOCK+1)*MEM_WIDTH-1:STRM1_BLOCK*MEM_WIDTH];
+	end
+	assign m_axis_1_tdata = rdqmux[1][STRM1_WIDTH-1:0];
+end
+
+if(NSTREAMS >= 3) begin: en_strm2
+	if(STRM2_MUX == 1) begin: mux2
+		mux #(STRM2_NBLOCKS, MEM_WIDTH) m(rdqa[(STRM2_BLOCK+STRM2_NBLOCKS)*MEM_WIDTH-1:STRM2_BLOCK*MEM_WIDTH],rdqmux[2],rdblocka[1] - STRM2_BLOCK);
+	end else begin: nomux2
+		assign rdqmux[2] = rdqa[(STRM2_BLOCK+1)*MEM_WIDTH-1:STRM2_BLOCK*MEM_WIDTH];
+	end
+	assign m_axis_2_tdata = rdqmux[2][STRM2_WIDTH-1:0];
+end
+
+if(NSTREAMS >= 4) begin: en_strm3
+	if(STRM3_MUX == 1) begin: mux3
+		mux #(STRM3_NBLOCKS, MEM_WIDTH) m(rdqb[(STRM3_BLOCK+STRM3_NBLOCKS)*MEM_WIDTH-1:STRM3_BLOCK*MEM_WIDTH],rdqmux[3],rdblockb[1] - STRM3_BLOCK);
+	end else begin: nomux3
+		assign rdqmux[3] = rdqb[(STRM3_BLOCK+1)*MEM_WIDTH-1:STRM3_BLOCK*MEM_WIDTH];
+	end
+	assign m_axis_3_tdata = rdqmux[3][STRM3_WIDTH-1:0];
+end
+
+if(NSTREAMS >= 5) begin: en_strm4
+	if(STRM4_MUX == 1) begin: mux4
+		mux #(STRM4_NBLOCKS, MEM_WIDTH) m(rdqa[(STRM4_BLOCK+STRM4_NBLOCKS)*MEM_WIDTH-1:STRM4_BLOCK*MEM_WIDTH],rdqmux[4],rdblocka[1] - STRM4_BLOCK);
+	end else begin: nomux4
+		assign rdqmux[4] = rdqa[(STRM4_BLOCK+1)*MEM_WIDTH-1:STRM4_BLOCK*MEM_WIDTH];
+	end
+	assign m_axis_4_tdata = rdqmux[4][STRM4_WIDTH-1:0];
+end
+
+if(NSTREAMS >= 6) begin: en_strm5
+	if(STRM5_MUX == 1) begin: mux5
+		mux #(STRM5_NBLOCKS, MEM_WIDTH) m(rdqb[(STRM5_BLOCK+STRM5_NBLOCKS)*MEM_WIDTH-1:STRM5_BLOCK*MEM_WIDTH],rdqmux[5],rdblockb[1] - STRM5_BLOCK);
+	end else begin: nomux5
+		assign rdqmux[5] = rdqb[(STRM5_BLOCK+1)*MEM_WIDTH-1:STRM5_BLOCK*MEM_WIDTH];
+	end
+	assign m_axis_5_tdata = rdqmux[5][STRM5_WIDTH-1:0];
+end
+
+end else begin: singleblock
+
+if(NSTREAMS >= 1) begin: en_strm0_direct
+    assign m_axis_0_tdata = rdqa[STRM0_WIDTH-1:0];
+end
+if(NSTREAMS >= 2) begin: en_strm1_direct
+	assign m_axis_1_tdata = rdqb[STRM1_WIDTH-1:0];
+end
+if(NSTREAMS >= 3) begin: en_strm2_direct
+	assign m_axis_2_tdata = rdqa[STRM2_WIDTH-1:0];
+end
+if(NSTREAMS >= 4) begin: en_strm3_direct
+	assign m_axis_3_tdata = rdqb[STRM3_WIDTH-1:0];
+end
+if(NSTREAMS >= 5) begin: en_strm4_direct
+	assign m_axis_4_tdata = rdqa[STRM4_WIDTH-1:0];
+end
+if(NSTREAMS >= 6) begin: en_strm5_direct
+	assign m_axis_5_tdata = rdqb[STRM5_WIDTH-1:0];
+end
+
+end
+endgenerate
+
+//output to AXI Streams
+reg tvalid_pipe0[2:0];
+reg tvalid_pipe1[2:0];
+reg tvalid_pipe2[2:0];
+reg tvalid_pipe3[2:0];
+reg tvalid_pipe4[2:0];
+reg tvalid_pipe5[2:0];
+
+assign m_axis_0_tvalid = tvalid_pipe0[2];
+assign m_axis_1_tvalid = tvalid_pipe1[2];
+assign m_axis_2_tvalid = tvalid_pipe2[2];
+assign m_axis_3_tvalid = tvalid_pipe3[2];
+assign m_axis_4_tvalid = tvalid_pipe4[2];
+assign m_axis_5_tvalid = tvalid_pipe5[2];
+
+
+always @(posedge aclk) begin
+    tvalid_pipe0[0] <= strm0_incr_en;
+    tvalid_pipe1[0] <= strm1_incr_en;
+    tvalid_pipe2[0] <= strm2_incr_en;
+    tvalid_pipe3[0] <= strm3_incr_en;
+    tvalid_pipe4[0] <= strm4_incr_en;
+    tvalid_pipe5[0] <= strm5_incr_en;
+    for(i=0; i<2; i=i+1) begin: srl
+        tvalid_pipe0[i+1] <= tvalid_pipe0[i];
+        tvalid_pipe1[i+1] <= tvalid_pipe1[i];
+        tvalid_pipe2[i+1] <= tvalid_pipe2[i];
+        tvalid_pipe3[i+1] <= tvalid_pipe3[i];
+        tvalid_pipe4[i+1] <= tvalid_pipe4[i];
+        tvalid_pipe5[i+1] <= tvalid_pipe5[i];
+    end
+end
+
+assign config_q0 = 0;
+
+endmodule
diff --git a/finn-rtllib/memstream/hdl/memstream_singleblock.v b/finn-rtllib/memstream/hdl/memstream_singleblock.v
new file mode 100644
index 0000000000000000000000000000000000000000..53a71a91bc0561e275791ebcf55e2c4653331b1d
--- /dev/null
+++ b/finn-rtllib/memstream/hdl/memstream_singleblock.v
@@ -0,0 +1,229 @@
+/*
+ Copyright (c) 2020, Xilinx
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+ * Neither the name of FINN nor the names of its
+   contributors may be used to endorse or promote products derived from
+   this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+/*
+    Implements a lightweight streamer for up to 2 streams in a single block of memory
+*/
+
+module memstream_singleblock
+#(
+    parameter CONFIG_EN = 1,
+    parameter NSTREAMS = 2,//1 up to 2
+
+    parameter MEM_DEPTH = 512,
+    parameter MEM_WIDTH = 32,
+    parameter MEM_INIT = "./",
+    parameter RAM_STYLE = "auto",
+
+    //widths per stream
+	parameter STRM0_WIDTH = 32,
+	parameter STRM1_WIDTH = 32,
+
+	//depths per stream
+	parameter STRM0_DEPTH = 256,
+	parameter STRM1_DEPTH = 256,
+
+	//offsets for each stream
+	parameter STRM0_OFFSET = 0,
+	parameter STRM1_OFFSET = 256
+)
+
+(
+    input aclk,
+    input aresetn,
+
+    //optional configuration interface compatible with ap_memory
+	input [31:0] config_address,
+	input config_ce,
+	input config_we,
+	input [MEM_WIDTH-1:0] config_d0,
+	output [MEM_WIDTH-1:0] config_q0,
+
+    //multiple output AXI Streams, TDATA width rounded to multiple of 8 bits
+    input m_axis_0_tready,
+    output m_axis_0_tvalid,
+    output [((STRM0_WIDTH+7)/8)*8-1:0] m_axis_0_tdata,
+
+    input m_axis_1_tready,
+    output m_axis_1_tvalid,
+    output [((STRM1_WIDTH+7)/8)*8-1:0] m_axis_1_tdata
+
+);
+
+
+//TODO: check that memory width is equal to the widest stream
+//TODO: check that the stream depths and offsets make sense, and that the memory depth is sufficient (or calculate depth here?)
+initial begin
+    if((NSTREAMS < 1) | (NSTREAMS > 2)) begin
+        $display("Invalid setting for NSTREAMS, please set in range [1,2]");
+        $finish();
+    end
+end
+
+//invert reset
+wire rst;
+assign rst = ~aresetn;
+
+wire strm0_incr_en;
+wire strm1_incr_en;
+
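+//advance a stream whenever its consumer accepts data or its output register is still empty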
+assign strm0_incr_en = m_axis_0_tready | ~m_axis_0_tvalid;
+assign strm1_incr_en = m_axis_1_tready | ~m_axis_1_tvalid;
+
+generate
+if(MEM_DEPTH > 1) begin: use_ram
+
+//calculate width of memory address, with a minimum of 1 bit
+localparam BLOCKADRWIDTH = $clog2(MEM_DEPTH);
+
+reg [BLOCKADRWIDTH-1:0] strm0_addr = STRM0_OFFSET;
+wire strm0_rst;
+assign strm0_rst = strm0_incr_en & (strm0_addr == (STRM0_OFFSET + STRM0_DEPTH-1));
+
+//one address counter per stream; more LUTs but keeps routing short and local
+always @(posedge aclk) begin
+    if(strm0_rst | rst)
+        strm0_addr <= STRM0_OFFSET;
+    else if(strm0_incr_en)
+        strm0_addr <= strm0_addr + 1;
+end
+
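+//single stream: a simple dual-port RAM suffices (port A for config writes, port B for streaming reads)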
+if(NSTREAMS == 1) begin: sdp
+
+ramb18_sdp
+#(
+    .ID(0),
+	.DWIDTH(MEM_WIDTH),
+	.AWIDTH(BLOCKADRWIDTH),
+    .DEPTH(MEM_DEPTH),
+	.MEM_INIT(MEM_INIT),
+    .RAM_STYLE(RAM_STYLE)
+)
+ram
+(
+	.clk(aclk),
+
+    .ena(config_ce),
+	.wea(config_we),
+	.addra(config_address[BLOCKADRWIDTH-1:0]),
+    .wdataa(config_d0),
+
+    .enb(strm0_incr_en),
+    .enqb(strm0_incr_en),
+	.addrb(strm0_addr),
+	.rdqb(m_axis_0_tdata)
+);
+
+
+end else begin: tdp
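+//two streams: use a true dual-port RAM; port A is shared between stream 0 and the config interface, port B serves stream 1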
+
+reg [BLOCKADRWIDTH-1:0] strm1_addr = STRM1_OFFSET;
+wire strm1_rst;
+assign strm1_rst = strm1_incr_en & (strm1_addr == (STRM1_OFFSET + STRM1_DEPTH-1));
+
+always @(posedge aclk) begin
+    if(strm1_rst | rst)
+        strm1_addr <= STRM1_OFFSET;
+    else if(strm1_incr_en)
+        strm1_addr <= strm1_addr + 1;
+end
+
+ramb18_wf_dualport
+#(
+    .ID(0),
+	.DWIDTH(MEM_WIDTH),
+	.AWIDTH(BLOCKADRWIDTH),
+    .DEPTH(MEM_DEPTH),
+	.MEM_INIT(MEM_INIT),
+    .RAM_STYLE(RAM_STYLE)
+)
+ram
+(
+	.clk(aclk),
+
+	.wea(config_we),
+    .ena(strm0_incr_en | config_ce),
+    .enqa(strm0_incr_en | config_ce),
+	.addra(config_we ? config_address[BLOCKADRWIDTH-1:0] : strm0_addr),
+	.wdataa(config_d0),
+	.rdqa(m_axis_0_tdata),
+
+	.web(1'b0),
+    .enb(strm1_incr_en),
+    .enqb(strm1_incr_en),
+	.addrb(strm1_addr),
+	.wdatab('d0),
+	.rdqb(m_axis_1_tdata)
+);
+
+end
+
+end else begin: bypass
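+//MEM_DEPTH of 1: skip the RAM and drive both streams from a single word loaded from memblock_0.dat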
+
+reg [MEM_WIDTH-1:0] singleval[0:0];
+initial begin
+    $readmemh({MEM_INIT,"memblock_0.dat"}, singleval, 0, 0);
+end
+
+assign m_axis_0_tdata = singleval[0];
+assign m_axis_1_tdata = singleval[0];
+
+end
+endgenerate
+
+//output becomes valid two read cycles after initialization
+//and then stays valid until reset
+reg [1:0] tvalid_pipe0 = 2'd0;
+reg [1:0] tvalid_pipe1 = 2'd0;
+
+assign m_axis_0_tvalid = tvalid_pipe0[1];
+assign m_axis_1_tvalid = tvalid_pipe1[1];
+
+always @(posedge aclk) begin
+    if(rst) begin
+        tvalid_pipe0 <= 0;
+    end else if(strm0_incr_en) begin
+        tvalid_pipe0[0] <= 1;
+        tvalid_pipe0[1] <= tvalid_pipe0[0];
+    end
+end
+
+always @(posedge aclk) begin
+    if(rst) begin
+        tvalid_pipe1 <= 0;
+    end else if(strm1_incr_en) begin
+        tvalid_pipe1[0] <= 1;
+        tvalid_pipe1[1] <= tvalid_pipe1[0];
+    end
+end
+
+assign config_q0 = m_axis_0_tdata;
+
+endmodule
diff --git a/finn-rtllib/memstream/hdl/ramb18_sdp.v b/finn-rtllib/memstream/hdl/ramb18_sdp.v
new file mode 100644
index 0000000000000000000000000000000000000000..63a349f7d56197a9b5a66c837a2f003a6e8475e6
--- /dev/null
+++ b/finn-rtllib/memstream/hdl/ramb18_sdp.v
@@ -0,0 +1,96 @@
+/*
+ Copyright (c) 2020, Xilinx
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+ * Neither the name of FINN nor the names of its
+   contributors may be used to endorse or promote products derived from
+   this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+module ramb18_sdp
+#(
+    parameter ID = 0,
+    parameter DWIDTH = 18,
+    parameter AWIDTH = 10,
+    parameter DEPTH = 2**AWIDTH,
+    parameter MEM_INIT = "",
+    parameter RAM_STYLE = "auto"
+)
+(
+	input clk,
+
+	input ena,
+	input wea,
+	input [AWIDTH-1:0] addra,
+	input [DWIDTH-1:0] wdataa,
+
+    input enb,
+    input enqb,
+	input [AWIDTH-1:0] addrb,
+	output reg [DWIDTH-1:0] rdqb
+);
+
+(* ram_style = RAM_STYLE *) reg [DWIDTH-1:0] mem[0:DEPTH-1];
+reg [DWIDTH-1:0] rdatab;
+
+`ifdef SYNTHESIS
+reg [7:0] idx = ID;
+`else
+reg [15:0] idx;
+`endif
+
+//initialize memory
+initial begin
+  //note the hacky way of adding a filename memblock_ID.dat to the path provided in MEM_INIT
+  //ID can go up to 99
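+  //in the synthesis branch, adding 8'd48 ('0') converts each decimal digit of ID into its ASCII character for the filename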
+  if (ID < 0 || ID > 99) begin
+    $display("ID out of range [0-99]");
+    $finish();
+  end
+	//MEM_INIT path must be terminated by /
+  `ifdef SYNTHESIS
+  if (ID < 10)
+    $readmemh({MEM_INIT,"memblock_",idx+8'd48,".dat"}, mem, 0, DEPTH-1);
+  else
+    $readmemh({MEM_INIT,"memblock_",(idx/10)+8'd48,(idx%10)+8'd48,".dat"}, mem, 0, DEPTH-1);
+  `else
+  $sformat(idx,"%0d",ID);
+  if (ID < 10)
+    $readmemh({MEM_INIT,"memblock_",idx[7:0],".dat"}, mem, 0, DEPTH-1);
+  else
+    $readmemh({MEM_INIT,"memblock_",idx,".dat"}, mem, 0, DEPTH-1);
+  `endif
+end
+
+//memory ports, with output pipeline register
+always @(posedge clk) begin
+    if(wea)
+        mem[addra] <= wdataa;
+    if(enb)
+        rdatab <= mem[addrb];
+    if(enqb)
+        rdqb <= rdatab;
+end
+
+endmodule
diff --git a/finn-rtllib/memstream/hdl/ramb18_wf_dualport.v b/finn-rtllib/memstream/hdl/ramb18_wf_dualport.v
index 4219d0f1c74bddff690b0d0cb21ce6a448c01c97..c7850106ae4cad21f1230477ee86062411e531c8 100644
--- a/finn-rtllib/memstream/hdl/ramb18_wf_dualport.v
+++ b/finn-rtllib/memstream/hdl/ramb18_wf_dualport.v
@@ -31,26 +31,31 @@
 module ramb18_wf_dualport
 #(
     parameter ID = 0,
-	parameter DWIDTH = 18,
-	parameter AWIDTH = 10,
-	parameter MEM_INIT = "",
-  parameter RAM_STYLE = "auto"
+    parameter DWIDTH = 18,
+    parameter AWIDTH = 10,
+    parameter DEPTH = 2**AWIDTH,
+    parameter MEM_INIT = "",
+    parameter RAM_STYLE = "auto"
 )
 (
 	input clk,
 
 	input wea,
+    input ena,
+    input enqa,
 	input [AWIDTH-1:0] addra,
 	input [DWIDTH-1:0] wdataa,
 	output reg [DWIDTH-1:0] rdqa,
 
 	input web,
+    input enb,
+    input enqb,
 	input [AWIDTH-1:0] addrb,
 	input [DWIDTH-1:0] wdatab,
 	output reg [DWIDTH-1:0] rdqb
 );
 
-(* ram_style = RAM_STYLE *) reg [DWIDTH-1:0] mem[0:2**AWIDTH-1];
+(* ram_style = RAM_STYLE *) reg [DWIDTH-1:0] mem[0:DEPTH-1];
 reg [DWIDTH-1:0] rdataa;
 reg [DWIDTH-1:0] rdatab;
 
@@ -71,30 +76,36 @@ initial begin
 	//MEM_INIT path must be terminated by /
   `ifdef SYNTHESIS
   if (ID < 10)
-    $readmemh({MEM_INIT,"memblock_",idx+8'd48,".dat"}, mem, 0, 1023);
+    $readmemh({MEM_INIT,"memblock_",idx+8'd48,".dat"}, mem, 0, DEPTH-1);
   else
-    $readmemh({MEM_INIT,"memblock_",(idx/10)+8'd48,(idx%10)+8'd48,".dat"}, mem, 0, 1023);
+    $readmemh({MEM_INIT,"memblock_",(idx/10)+8'd48,(idx%10)+8'd48,".dat"}, mem, 0, DEPTH-1);
   `else
   $sformat(idx,"%0d",ID);
   if (ID < 10)
-    $readmemh({MEM_INIT,"memblock_",idx[7:0],".dat"}, mem, 0, 1023);
+    $readmemh({MEM_INIT,"memblock_",idx[7:0],".dat"}, mem, 0, DEPTH-1);
   else
-    $readmemh({MEM_INIT,"memblock_",idx,".dat"}, mem, 0, 1023);
+    $readmemh({MEM_INIT,"memblock_",idx,".dat"}, mem, 0, DEPTH-1);
   `endif
 end
 
 //memory ports, with output pipeline register
 always @(posedge clk) begin
-    if(wea)
-        mem[addra] <= wdataa;
-    rdataa <= mem[addra];
-    rdqa <= rdataa;
+    if(ena) begin
+        if(wea)
+            mem[addra] <= wdataa;
+        rdataa <= mem[addra];
+    end
+    if(enqa)
+        rdqa <= rdataa;
 end
 always @(posedge clk) begin
-    if(web)
-        mem[addrb] <= wdatab;
-    rdatab <= mem[addrb];
-    rdqb <= rdatab;
+    if(enb) begin
+        if(web)
+            mem[addrb] <= wdatab;
+        rdatab <= mem[addrb];
+    end
+    if(enqb)
+        rdqb <= rdatab;
 end
 
 endmodule
diff --git a/finn-rtllib/memstream/xgui/memstream_v1_0.tcl b/finn-rtllib/memstream/xgui/memstream_v1_0.tcl
index e5cbb670da94612e8de73f48cfa4562f89e124d1..7ce84b44a7cd6e20b59fd1b21a467d137ff0288f 100644
--- a/finn-rtllib/memstream/xgui/memstream_v1_0.tcl
+++ b/finn-rtllib/memstream/xgui/memstream_v1_0.tcl
@@ -1,383 +1,370 @@
-# Copyright (c) 2020, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# Definitional proc to organize widgets for parameters.
-proc init_gui { IPINST } {
-  ipgui::add_param $IPINST -name "Component_Name"
-  #Adding Page
-  set Page_0 [ipgui::add_page $IPINST -name "Page 0"]
-  ipgui::add_param $IPINST -name "CONFIG_EN" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "MEM_DEPTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "MEM_INIT" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "MEM_WIDTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "NSTREAMS" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM0_DEPTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM0_OFFSET" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM0_WIDTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM1_DEPTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM1_OFFSET" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM1_WIDTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM2_DEPTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM2_OFFSET" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM2_WIDTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM3_DEPTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM3_OFFSET" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM3_WIDTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM4_DEPTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM4_OFFSET" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM4_WIDTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM5_DEPTH" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM5_OFFSET" -parent ${Page_0}
-  ipgui::add_param $IPINST -name "STRM5_WIDTH" -parent ${Page_0}
-
-
-}
-
-proc update_PARAM_VALUE.CONFIG_EN { PARAM_VALUE.CONFIG_EN } {
-	# Procedure called to update CONFIG_EN when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.CONFIG_EN { PARAM_VALUE.CONFIG_EN } {
-	# Procedure called to validate CONFIG_EN
-	return true
-}
-
-proc update_PARAM_VALUE.MEM_DEPTH { PARAM_VALUE.MEM_DEPTH } {
-	# Procedure called to update MEM_DEPTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.MEM_DEPTH { PARAM_VALUE.MEM_DEPTH } {
-	# Procedure called to validate MEM_DEPTH
-	return true
-}
-
-proc update_PARAM_VALUE.MEM_INIT { PARAM_VALUE.MEM_INIT } {
-	# Procedure called to update MEM_INIT when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.MEM_INIT { PARAM_VALUE.MEM_INIT } {
-	# Procedure called to validate MEM_INIT
-	return true
-}
-
-proc update_PARAM_VALUE.MEM_WIDTH { PARAM_VALUE.MEM_WIDTH } {
-	# Procedure called to update MEM_WIDTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.MEM_WIDTH { PARAM_VALUE.MEM_WIDTH } {
-	# Procedure called to validate MEM_WIDTH
-	return true
-}
-
-proc update_PARAM_VALUE.NSTREAMS { PARAM_VALUE.NSTREAMS } {
-	# Procedure called to update NSTREAMS when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.NSTREAMS { PARAM_VALUE.NSTREAMS } {
-	# Procedure called to validate NSTREAMS
-	return true
-}
-
-proc update_PARAM_VALUE.STRM0_DEPTH { PARAM_VALUE.STRM0_DEPTH } {
-	# Procedure called to update STRM0_DEPTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM0_DEPTH { PARAM_VALUE.STRM0_DEPTH } {
-	# Procedure called to validate STRM0_DEPTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM0_OFFSET { PARAM_VALUE.STRM0_OFFSET } {
-	# Procedure called to update STRM0_OFFSET when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM0_OFFSET { PARAM_VALUE.STRM0_OFFSET } {
-	# Procedure called to validate STRM0_OFFSET
-	return true
-}
-
-proc update_PARAM_VALUE.STRM0_WIDTH { PARAM_VALUE.STRM0_WIDTH } {
-	# Procedure called to update STRM0_WIDTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM0_WIDTH { PARAM_VALUE.STRM0_WIDTH } {
-	# Procedure called to validate STRM0_WIDTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM1_DEPTH { PARAM_VALUE.STRM1_DEPTH } {
-	# Procedure called to update STRM1_DEPTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM1_DEPTH { PARAM_VALUE.STRM1_DEPTH } {
-	# Procedure called to validate STRM1_DEPTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM1_OFFSET { PARAM_VALUE.STRM1_OFFSET } {
-	# Procedure called to update STRM1_OFFSET when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM1_OFFSET { PARAM_VALUE.STRM1_OFFSET } {
-	# Procedure called to validate STRM1_OFFSET
-	return true
-}
-
-proc update_PARAM_VALUE.STRM1_WIDTH { PARAM_VALUE.STRM1_WIDTH } {
-	# Procedure called to update STRM1_WIDTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM1_WIDTH { PARAM_VALUE.STRM1_WIDTH } {
-	# Procedure called to validate STRM1_WIDTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM2_DEPTH { PARAM_VALUE.STRM2_DEPTH } {
-	# Procedure called to update STRM2_DEPTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM2_DEPTH { PARAM_VALUE.STRM2_DEPTH } {
-	# Procedure called to validate STRM2_DEPTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM2_OFFSET { PARAM_VALUE.STRM2_OFFSET } {
-	# Procedure called to update STRM2_OFFSET when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM2_OFFSET { PARAM_VALUE.STRM2_OFFSET } {
-	# Procedure called to validate STRM2_OFFSET
-	return true
-}
-
-proc update_PARAM_VALUE.STRM2_WIDTH { PARAM_VALUE.STRM2_WIDTH } {
-	# Procedure called to update STRM2_WIDTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM2_WIDTH { PARAM_VALUE.STRM2_WIDTH } {
-	# Procedure called to validate STRM2_WIDTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM3_DEPTH { PARAM_VALUE.STRM3_DEPTH } {
-	# Procedure called to update STRM3_DEPTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM3_DEPTH { PARAM_VALUE.STRM3_DEPTH } {
-	# Procedure called to validate STRM3_DEPTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM3_OFFSET { PARAM_VALUE.STRM3_OFFSET } {
-	# Procedure called to update STRM3_OFFSET when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM3_OFFSET { PARAM_VALUE.STRM3_OFFSET } {
-	# Procedure called to validate STRM3_OFFSET
-	return true
-}
-
-proc update_PARAM_VALUE.STRM3_WIDTH { PARAM_VALUE.STRM3_WIDTH } {
-	# Procedure called to update STRM3_WIDTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM3_WIDTH { PARAM_VALUE.STRM3_WIDTH } {
-	# Procedure called to validate STRM3_WIDTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM4_DEPTH { PARAM_VALUE.STRM4_DEPTH } {
-	# Procedure called to update STRM4_DEPTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM4_DEPTH { PARAM_VALUE.STRM4_DEPTH } {
-	# Procedure called to validate STRM4_DEPTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM4_OFFSET { PARAM_VALUE.STRM4_OFFSET } {
-	# Procedure called to update STRM4_OFFSET when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM4_OFFSET { PARAM_VALUE.STRM4_OFFSET } {
-	# Procedure called to validate STRM4_OFFSET
-	return true
-}
-
-proc update_PARAM_VALUE.STRM4_WIDTH { PARAM_VALUE.STRM4_WIDTH } {
-	# Procedure called to update STRM4_WIDTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM4_WIDTH { PARAM_VALUE.STRM4_WIDTH } {
-	# Procedure called to validate STRM4_WIDTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM5_DEPTH { PARAM_VALUE.STRM5_DEPTH } {
-	# Procedure called to update STRM5_DEPTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM5_DEPTH { PARAM_VALUE.STRM5_DEPTH } {
-	# Procedure called to validate STRM5_DEPTH
-	return true
-}
-
-proc update_PARAM_VALUE.STRM5_OFFSET { PARAM_VALUE.STRM5_OFFSET } {
-	# Procedure called to update STRM5_OFFSET when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM5_OFFSET { PARAM_VALUE.STRM5_OFFSET } {
-	# Procedure called to validate STRM5_OFFSET
-	return true
-}
-
-proc update_PARAM_VALUE.STRM5_WIDTH { PARAM_VALUE.STRM5_WIDTH } {
-	# Procedure called to update STRM5_WIDTH when any of the dependent parameters in the arguments change
-}
-
-proc validate_PARAM_VALUE.STRM5_WIDTH { PARAM_VALUE.STRM5_WIDTH } {
-	# Procedure called to validate STRM5_WIDTH
-	return true
-}
-
-
-proc update_MODELPARAM_VALUE.CONFIG_EN { MODELPARAM_VALUE.CONFIG_EN PARAM_VALUE.CONFIG_EN } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.CONFIG_EN}] ${MODELPARAM_VALUE.CONFIG_EN}
-}
-
-proc update_MODELPARAM_VALUE.NSTREAMS { MODELPARAM_VALUE.NSTREAMS PARAM_VALUE.NSTREAMS } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.NSTREAMS}] ${MODELPARAM_VALUE.NSTREAMS}
-}
-
-proc update_MODELPARAM_VALUE.MEM_DEPTH { MODELPARAM_VALUE.MEM_DEPTH PARAM_VALUE.MEM_DEPTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.MEM_DEPTH}] ${MODELPARAM_VALUE.MEM_DEPTH}
-}
-
-proc update_MODELPARAM_VALUE.MEM_WIDTH { MODELPARAM_VALUE.MEM_WIDTH PARAM_VALUE.MEM_WIDTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.MEM_WIDTH}] ${MODELPARAM_VALUE.MEM_WIDTH}
-}
-
-proc update_MODELPARAM_VALUE.MEM_INIT { MODELPARAM_VALUE.MEM_INIT PARAM_VALUE.MEM_INIT } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.MEM_INIT}] ${MODELPARAM_VALUE.MEM_INIT}
-}
-
-proc update_MODELPARAM_VALUE.STRM0_WIDTH { MODELPARAM_VALUE.STRM0_WIDTH PARAM_VALUE.STRM0_WIDTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM0_WIDTH}] ${MODELPARAM_VALUE.STRM0_WIDTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM1_WIDTH { MODELPARAM_VALUE.STRM1_WIDTH PARAM_VALUE.STRM1_WIDTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM1_WIDTH}] ${MODELPARAM_VALUE.STRM1_WIDTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM2_WIDTH { MODELPARAM_VALUE.STRM2_WIDTH PARAM_VALUE.STRM2_WIDTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM2_WIDTH}] ${MODELPARAM_VALUE.STRM2_WIDTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM3_WIDTH { MODELPARAM_VALUE.STRM3_WIDTH PARAM_VALUE.STRM3_WIDTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM3_WIDTH}] ${MODELPARAM_VALUE.STRM3_WIDTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM4_WIDTH { MODELPARAM_VALUE.STRM4_WIDTH PARAM_VALUE.STRM4_WIDTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM4_WIDTH}] ${MODELPARAM_VALUE.STRM4_WIDTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM5_WIDTH { MODELPARAM_VALUE.STRM5_WIDTH PARAM_VALUE.STRM5_WIDTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM5_WIDTH}] ${MODELPARAM_VALUE.STRM5_WIDTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM0_DEPTH { MODELPARAM_VALUE.STRM0_DEPTH PARAM_VALUE.STRM0_DEPTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM0_DEPTH}] ${MODELPARAM_VALUE.STRM0_DEPTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM1_DEPTH { MODELPARAM_VALUE.STRM1_DEPTH PARAM_VALUE.STRM1_DEPTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM1_DEPTH}] ${MODELPARAM_VALUE.STRM1_DEPTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM2_DEPTH { MODELPARAM_VALUE.STRM2_DEPTH PARAM_VALUE.STRM2_DEPTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM2_DEPTH}] ${MODELPARAM_VALUE.STRM2_DEPTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM3_DEPTH { MODELPARAM_VALUE.STRM3_DEPTH PARAM_VALUE.STRM3_DEPTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM3_DEPTH}] ${MODELPARAM_VALUE.STRM3_DEPTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM4_DEPTH { MODELPARAM_VALUE.STRM4_DEPTH PARAM_VALUE.STRM4_DEPTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM4_DEPTH}] ${MODELPARAM_VALUE.STRM4_DEPTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM5_DEPTH { MODELPARAM_VALUE.STRM5_DEPTH PARAM_VALUE.STRM5_DEPTH } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM5_DEPTH}] ${MODELPARAM_VALUE.STRM5_DEPTH}
-}
-
-proc update_MODELPARAM_VALUE.STRM0_OFFSET { MODELPARAM_VALUE.STRM0_OFFSET PARAM_VALUE.STRM0_OFFSET } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM0_OFFSET}] ${MODELPARAM_VALUE.STRM0_OFFSET}
-}
-
-proc update_MODELPARAM_VALUE.STRM1_OFFSET { MODELPARAM_VALUE.STRM1_OFFSET PARAM_VALUE.STRM1_OFFSET } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM1_OFFSET}] ${MODELPARAM_VALUE.STRM1_OFFSET}
-}
-
-proc update_MODELPARAM_VALUE.STRM2_OFFSET { MODELPARAM_VALUE.STRM2_OFFSET PARAM_VALUE.STRM2_OFFSET } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM2_OFFSET}] ${MODELPARAM_VALUE.STRM2_OFFSET}
-}
-
-proc update_MODELPARAM_VALUE.STRM3_OFFSET { MODELPARAM_VALUE.STRM3_OFFSET PARAM_VALUE.STRM3_OFFSET } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM3_OFFSET}] ${MODELPARAM_VALUE.STRM3_OFFSET}
-}
-
-proc update_MODELPARAM_VALUE.STRM4_OFFSET { MODELPARAM_VALUE.STRM4_OFFSET PARAM_VALUE.STRM4_OFFSET } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM4_OFFSET}] ${MODELPARAM_VALUE.STRM4_OFFSET}
-}
-
-proc update_MODELPARAM_VALUE.STRM5_OFFSET { MODELPARAM_VALUE.STRM5_OFFSET PARAM_VALUE.STRM5_OFFSET } {
-	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
-	set_property value [get_property value ${PARAM_VALUE.STRM5_OFFSET}] ${MODELPARAM_VALUE.STRM5_OFFSET}
-}
-
+# Definitional proc to organize widgets for parameters.
+proc init_gui { IPINST } {
+  ipgui::add_param $IPINST -name "Component_Name"
+  #Adding Page
+  set Page_0 [ipgui::add_page $IPINST -name "Page 0"]
+  ipgui::add_param $IPINST -name "CONFIG_EN" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "MEM_DEPTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "MEM_INIT" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "MEM_WIDTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "RAM_STYLE" -parent ${Page_0} -widget comboBox
+  ipgui::add_param $IPINST -name "NSTREAMS" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM0_DEPTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM0_OFFSET" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM0_WIDTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM1_DEPTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM1_OFFSET" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM1_WIDTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM2_DEPTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM2_OFFSET" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM2_WIDTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM3_DEPTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM3_OFFSET" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM3_WIDTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM4_DEPTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM4_OFFSET" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM4_WIDTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM5_DEPTH" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM5_OFFSET" -parent ${Page_0}
+  ipgui::add_param $IPINST -name "STRM5_WIDTH" -parent ${Page_0}
+
+
+}
+
+proc update_PARAM_VALUE.CONFIG_EN { PARAM_VALUE.CONFIG_EN } {
+	# Procedure called to update CONFIG_EN when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.CONFIG_EN { PARAM_VALUE.CONFIG_EN } {
+	# Procedure called to validate CONFIG_EN
+	return true
+}
+
+proc update_PARAM_VALUE.MEM_DEPTH { PARAM_VALUE.MEM_DEPTH } {
+	# Procedure called to update MEM_DEPTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.MEM_DEPTH { PARAM_VALUE.MEM_DEPTH } {
+	# Procedure called to validate MEM_DEPTH
+	return true
+}
+
+proc update_PARAM_VALUE.MEM_INIT { PARAM_VALUE.MEM_INIT } {
+	# Procedure called to update MEM_INIT when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.MEM_INIT { PARAM_VALUE.MEM_INIT } {
+	# Procedure called to validate MEM_INIT
+	return true
+}
+
+proc update_PARAM_VALUE.MEM_WIDTH { PARAM_VALUE.MEM_WIDTH } {
+	# Procedure called to update MEM_WIDTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.MEM_WIDTH { PARAM_VALUE.MEM_WIDTH } {
+	# Procedure called to validate MEM_WIDTH
+	return true
+}
+
+proc update_PARAM_VALUE.NSTREAMS { PARAM_VALUE.NSTREAMS } {
+	# Procedure called to update NSTREAMS when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.NSTREAMS { PARAM_VALUE.NSTREAMS } {
+	# Procedure called to validate NSTREAMS
+	return true
+}
+
+proc update_PARAM_VALUE.RAM_STYLE { PARAM_VALUE.RAM_STYLE } {
+	# Procedure called to update RAM_STYLE when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.RAM_STYLE { PARAM_VALUE.RAM_STYLE } {
+	# Procedure called to validate RAM_STYLE
+	return true
+}
+
+proc update_PARAM_VALUE.STRM0_DEPTH { PARAM_VALUE.STRM0_DEPTH } {
+	# Procedure called to update STRM0_DEPTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM0_DEPTH { PARAM_VALUE.STRM0_DEPTH } {
+	# Procedure called to validate STRM0_DEPTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM0_OFFSET { PARAM_VALUE.STRM0_OFFSET } {
+	# Procedure called to update STRM0_OFFSET when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM0_OFFSET { PARAM_VALUE.STRM0_OFFSET } {
+	# Procedure called to validate STRM0_OFFSET
+	return true
+}
+
+proc update_PARAM_VALUE.STRM0_WIDTH { PARAM_VALUE.STRM0_WIDTH } {
+	# Procedure called to update STRM0_WIDTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM0_WIDTH { PARAM_VALUE.STRM0_WIDTH } {
+	# Procedure called to validate STRM0_WIDTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM1_DEPTH { PARAM_VALUE.STRM1_DEPTH } {
+	# Procedure called to update STRM1_DEPTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM1_DEPTH { PARAM_VALUE.STRM1_DEPTH } {
+	# Procedure called to validate STRM1_DEPTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM1_OFFSET { PARAM_VALUE.STRM1_OFFSET } {
+	# Procedure called to update STRM1_OFFSET when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM1_OFFSET { PARAM_VALUE.STRM1_OFFSET } {
+	# Procedure called to validate STRM1_OFFSET
+	return true
+}
+
+proc update_PARAM_VALUE.STRM1_WIDTH { PARAM_VALUE.STRM1_WIDTH } {
+	# Procedure called to update STRM1_WIDTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM1_WIDTH { PARAM_VALUE.STRM1_WIDTH } {
+	# Procedure called to validate STRM1_WIDTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM2_DEPTH { PARAM_VALUE.STRM2_DEPTH } {
+	# Procedure called to update STRM2_DEPTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM2_DEPTH { PARAM_VALUE.STRM2_DEPTH } {
+	# Procedure called to validate STRM2_DEPTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM2_OFFSET { PARAM_VALUE.STRM2_OFFSET } {
+	# Procedure called to update STRM2_OFFSET when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM2_OFFSET { PARAM_VALUE.STRM2_OFFSET } {
+	# Procedure called to validate STRM2_OFFSET
+	return true
+}
+
+proc update_PARAM_VALUE.STRM2_WIDTH { PARAM_VALUE.STRM2_WIDTH } {
+	# Procedure called to update STRM2_WIDTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM2_WIDTH { PARAM_VALUE.STRM2_WIDTH } {
+	# Procedure called to validate STRM2_WIDTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM3_DEPTH { PARAM_VALUE.STRM3_DEPTH } {
+	# Procedure called to update STRM3_DEPTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM3_DEPTH { PARAM_VALUE.STRM3_DEPTH } {
+	# Procedure called to validate STRM3_DEPTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM3_OFFSET { PARAM_VALUE.STRM3_OFFSET } {
+	# Procedure called to update STRM3_OFFSET when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM3_OFFSET { PARAM_VALUE.STRM3_OFFSET } {
+	# Procedure called to validate STRM3_OFFSET
+	return true
+}
+
+proc update_PARAM_VALUE.STRM3_WIDTH { PARAM_VALUE.STRM3_WIDTH } {
+	# Procedure called to update STRM3_WIDTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM3_WIDTH { PARAM_VALUE.STRM3_WIDTH } {
+	# Procedure called to validate STRM3_WIDTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM4_DEPTH { PARAM_VALUE.STRM4_DEPTH } {
+	# Procedure called to update STRM4_DEPTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM4_DEPTH { PARAM_VALUE.STRM4_DEPTH } {
+	# Procedure called to validate STRM4_DEPTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM4_OFFSET { PARAM_VALUE.STRM4_OFFSET } {
+	# Procedure called to update STRM4_OFFSET when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM4_OFFSET { PARAM_VALUE.STRM4_OFFSET } {
+	# Procedure called to validate STRM4_OFFSET
+	return true
+}
+
+proc update_PARAM_VALUE.STRM4_WIDTH { PARAM_VALUE.STRM4_WIDTH } {
+	# Procedure called to update STRM4_WIDTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM4_WIDTH { PARAM_VALUE.STRM4_WIDTH } {
+	# Procedure called to validate STRM4_WIDTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM5_DEPTH { PARAM_VALUE.STRM5_DEPTH } {
+	# Procedure called to update STRM5_DEPTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM5_DEPTH { PARAM_VALUE.STRM5_DEPTH } {
+	# Procedure called to validate STRM5_DEPTH
+	return true
+}
+
+proc update_PARAM_VALUE.STRM5_OFFSET { PARAM_VALUE.STRM5_OFFSET } {
+	# Procedure called to update STRM5_OFFSET when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM5_OFFSET { PARAM_VALUE.STRM5_OFFSET } {
+	# Procedure called to validate STRM5_OFFSET
+	return true
+}
+
+proc update_PARAM_VALUE.STRM5_WIDTH { PARAM_VALUE.STRM5_WIDTH } {
+	# Procedure called to update STRM5_WIDTH when any of the dependent parameters in the arguments change
+}
+
+proc validate_PARAM_VALUE.STRM5_WIDTH { PARAM_VALUE.STRM5_WIDTH } {
+	# Procedure called to validate STRM5_WIDTH
+	return true
+}
+
+
+proc update_MODELPARAM_VALUE.CONFIG_EN { MODELPARAM_VALUE.CONFIG_EN PARAM_VALUE.CONFIG_EN } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.CONFIG_EN}] ${MODELPARAM_VALUE.CONFIG_EN}
+}
+
+proc update_MODELPARAM_VALUE.NSTREAMS { MODELPARAM_VALUE.NSTREAMS PARAM_VALUE.NSTREAMS } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.NSTREAMS}] ${MODELPARAM_VALUE.NSTREAMS}
+}
+
+proc update_MODELPARAM_VALUE.MEM_DEPTH { MODELPARAM_VALUE.MEM_DEPTH PARAM_VALUE.MEM_DEPTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.MEM_DEPTH}] ${MODELPARAM_VALUE.MEM_DEPTH}
+}
+
+proc update_MODELPARAM_VALUE.MEM_WIDTH { MODELPARAM_VALUE.MEM_WIDTH PARAM_VALUE.MEM_WIDTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.MEM_WIDTH}] ${MODELPARAM_VALUE.MEM_WIDTH}
+}
+
+proc update_MODELPARAM_VALUE.MEM_INIT { MODELPARAM_VALUE.MEM_INIT PARAM_VALUE.MEM_INIT } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.MEM_INIT}] ${MODELPARAM_VALUE.MEM_INIT}
+}
+
+proc update_MODELPARAM_VALUE.RAM_STYLE { MODELPARAM_VALUE.RAM_STYLE PARAM_VALUE.RAM_STYLE } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.RAM_STYLE}] ${MODELPARAM_VALUE.RAM_STYLE}
+}
+
+proc update_MODELPARAM_VALUE.STRM0_WIDTH { MODELPARAM_VALUE.STRM0_WIDTH PARAM_VALUE.STRM0_WIDTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM0_WIDTH}] ${MODELPARAM_VALUE.STRM0_WIDTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM1_WIDTH { MODELPARAM_VALUE.STRM1_WIDTH PARAM_VALUE.STRM1_WIDTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM1_WIDTH}] ${MODELPARAM_VALUE.STRM1_WIDTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM2_WIDTH { MODELPARAM_VALUE.STRM2_WIDTH PARAM_VALUE.STRM2_WIDTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM2_WIDTH}] ${MODELPARAM_VALUE.STRM2_WIDTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM3_WIDTH { MODELPARAM_VALUE.STRM3_WIDTH PARAM_VALUE.STRM3_WIDTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM3_WIDTH}] ${MODELPARAM_VALUE.STRM3_WIDTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM4_WIDTH { MODELPARAM_VALUE.STRM4_WIDTH PARAM_VALUE.STRM4_WIDTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM4_WIDTH}] ${MODELPARAM_VALUE.STRM4_WIDTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM5_WIDTH { MODELPARAM_VALUE.STRM5_WIDTH PARAM_VALUE.STRM5_WIDTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM5_WIDTH}] ${MODELPARAM_VALUE.STRM5_WIDTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM0_DEPTH { MODELPARAM_VALUE.STRM0_DEPTH PARAM_VALUE.STRM0_DEPTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM0_DEPTH}] ${MODELPARAM_VALUE.STRM0_DEPTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM1_DEPTH { MODELPARAM_VALUE.STRM1_DEPTH PARAM_VALUE.STRM1_DEPTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM1_DEPTH}] ${MODELPARAM_VALUE.STRM1_DEPTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM2_DEPTH { MODELPARAM_VALUE.STRM2_DEPTH PARAM_VALUE.STRM2_DEPTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM2_DEPTH}] ${MODELPARAM_VALUE.STRM2_DEPTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM3_DEPTH { MODELPARAM_VALUE.STRM3_DEPTH PARAM_VALUE.STRM3_DEPTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM3_DEPTH}] ${MODELPARAM_VALUE.STRM3_DEPTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM4_DEPTH { MODELPARAM_VALUE.STRM4_DEPTH PARAM_VALUE.STRM4_DEPTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM4_DEPTH}] ${MODELPARAM_VALUE.STRM4_DEPTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM5_DEPTH { MODELPARAM_VALUE.STRM5_DEPTH PARAM_VALUE.STRM5_DEPTH } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM5_DEPTH}] ${MODELPARAM_VALUE.STRM5_DEPTH}
+}
+
+proc update_MODELPARAM_VALUE.STRM0_OFFSET { MODELPARAM_VALUE.STRM0_OFFSET PARAM_VALUE.STRM0_OFFSET } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM0_OFFSET}] ${MODELPARAM_VALUE.STRM0_OFFSET}
+}
+
+proc update_MODELPARAM_VALUE.STRM1_OFFSET { MODELPARAM_VALUE.STRM1_OFFSET PARAM_VALUE.STRM1_OFFSET } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM1_OFFSET}] ${MODELPARAM_VALUE.STRM1_OFFSET}
+}
+
+proc update_MODELPARAM_VALUE.STRM2_OFFSET { MODELPARAM_VALUE.STRM2_OFFSET PARAM_VALUE.STRM2_OFFSET } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM2_OFFSET}] ${MODELPARAM_VALUE.STRM2_OFFSET}
+}
+
+proc update_MODELPARAM_VALUE.STRM3_OFFSET { MODELPARAM_VALUE.STRM3_OFFSET PARAM_VALUE.STRM3_OFFSET } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM3_OFFSET}] ${MODELPARAM_VALUE.STRM3_OFFSET}
+}
+
+proc update_MODELPARAM_VALUE.STRM4_OFFSET { MODELPARAM_VALUE.STRM4_OFFSET PARAM_VALUE.STRM4_OFFSET } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM4_OFFSET}] ${MODELPARAM_VALUE.STRM4_OFFSET}
+}
+
+proc update_MODELPARAM_VALUE.STRM5_OFFSET { MODELPARAM_VALUE.STRM5_OFFSET PARAM_VALUE.STRM5_OFFSET } {
+	# Procedure called to set VHDL generic/Verilog parameter value(s) based on TCL parameter value
+	set_property value [get_property value ${PARAM_VALUE.STRM5_OFFSET}] ${MODELPARAM_VALUE.STRM5_OFFSET}
+}
+
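The regenerated xgui script above differs from the deleted one only in the new RAM_STYLE parameter, which init_gui exposes as a combo box and which receives the same boilerplate as every other generic: a no-op update proc, an always-true validate proc, and an update_MODELPARAM proc that copies the Tcl value straight onto the HDL generic. Below is a minimal Python sketch (illustrative only, not Vivado or FINN API; the parameter list is copied from the procs above and the example values are made up) of that copy-through pattern, just to make explicit that no conversion or validation happens on the way to the RTL.

```python
# Sketch of the xgui parameter propagation above (illustrative, not Vivado/FINN API):
# every GUI parameter, including the new RAM_STYLE combo box, is copied unchanged
# onto the matching HDL generic.
MEMSTREAM_PARAMS = [
    "CONFIG_EN", "NSTREAMS", "MEM_DEPTH", "MEM_WIDTH", "MEM_INIT", "RAM_STYLE",
] + ["STRM%d_%s" % (i, f) for i in range(6) for f in ("WIDTH", "DEPTH", "OFFSET")]

def propagate_modelparams(gui_values):
    """Mirror of the update_MODELPARAM_VALUE.* procs: plain pass-through copy."""
    return {name: gui_values[name] for name in MEMSTREAM_PARAMS if name in gui_values}

# e.g. the RAM_STYLE selection (value here is hypothetical) reaches the RTL verbatim:
print(propagate_modelparams({"RAM_STYLE": "auto", "NSTREAMS": 4, "MEM_WIDTH": 32}))
# {'NSTREAMS': 4, 'MEM_WIDTH': 32, 'RAM_STYLE': 'auto'}
```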
diff --git a/notebooks/basics/1_brevitas_network_import.ipynb b/notebooks/basics/1_brevitas_network_import.ipynb
index 0abf671a57ddeebb9e93745d2dfafb19e5a8373e..3c9cad615e168e19c7f5dfef45e7c7c60965d1e3 100644
--- a/notebooks/basics/1_brevitas_network_import.ipynb
+++ b/notebooks/basics/1_brevitas_network_import.ipynb
@@ -31,29 +31,64 @@
    "source": [
     "## 1. Load up the trained PyTorch model\n",
     "\n",
-    "The FINN Docker image comes with several [example Brevitas networks](https://github.com/maltanar/brevitas_cnv_lfc), and we'll use the LFC-w1a1 model as the example network here. This is a binarized fully connected network trained on the MNIST dataset. Let's start by looking at what the PyTorch network definition looks like:"
+    "The FINN Docker image comes with several [example Brevitas networks](https://github.com/Xilinx/brevitas/tree/master/brevitas_examples/bnn_pynq), and we'll use the LFC-w1a1 model as the example network here. This is a binarized fully connected network trained on the MNIST dataset. Let's start by looking at what the PyTorch network definition looks like:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
+      "# MIT License\n",
+      "#\n",
+      "# Copyright (c) 2019 Xilinx\n",
+      "#\n",
+      "# Permission is hereby granted, free of charge, to any person obtaining a copy\n",
+      "# of this software and associated documentation files (the \"Software\"), to deal\n",
+      "# in the Software without restriction, including without limitation the rights\n",
+      "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n",
+      "# copies of the Software, and to permit persons to whom the Software is\n",
+      "# furnished to do so, subject to the following conditions:\n",
+      "#\n",
+      "# The above copyright notice and this permission notice shall be included in all\n",
+      "# copies or substantial portions of the Software.\n",
+      "#\n",
+      "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n",
+      "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n",
+      "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n",
+      "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n",
+      "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n",
+      "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n",
+      "# SOFTWARE.\n",
+      "\n",
+      "from functools import reduce\n",
+      "from operator import mul\n",
+      "\n",
+      "from torch.nn import Module, ModuleList, BatchNorm1d, Dropout\n",
+      "import torch\n",
+      "\n",
+      "from .common import get_quant_linear, get_act_quant, get_quant_type, QuantLinear\n",
+      "\n",
+      "FC_OUT_FEATURES = [1024, 1024, 1024]\n",
+      "INTERMEDIATE_FC_PER_OUT_CH_SCALING = True\n",
+      "LAST_FC_PER_OUT_CH_SCALING = False\n",
+      "IN_DROPOUT = 0.2\n",
+      "HIDDEN_DROPOUT = 0.2\n",
+      "\n",
+      "\n",
       "class LFC(Module):\n",
       "\n",
       "    def __init__(self, num_classes=10, weight_bit_width=None, act_bit_width=None,\n",
-      "                 in_bit_width=None, in_ch=1, in_features=(28, 28), device=\"cpu\"):\n",
+      "                 in_bit_width=None, in_ch=1, in_features=(28, 28)):\n",
       "        super(LFC, self).__init__()\n",
-      "        self.device = device\n",
       "\n",
       "        weight_quant_type = get_quant_type(weight_bit_width)\n",
       "        act_quant_type = get_quant_type(act_bit_width)\n",
       "        in_quant_type = get_quant_type(in_bit_width)\n",
-      "        stats_op = get_stats_op(weight_quant_type)\n",
       "\n",
       "        self.features = ModuleList()\n",
       "        self.features.append(get_act_quant(in_bit_width, in_quant_type))\n",
@@ -64,8 +99,7 @@
       "                                                  out_features=out_features,\n",
       "                                                  per_out_ch_scaling=INTERMEDIATE_FC_PER_OUT_CH_SCALING,\n",
       "                                                  bit_width=weight_bit_width,\n",
-      "                                                  quant_type=weight_quant_type,\n",
-      "                                                  stats_op=stats_op))\n",
+      "                                                  quant_type=weight_quant_type))\n",
       "            in_features = out_features\n",
       "            self.features.append(BatchNorm1d(num_features=in_features))\n",
       "            self.features.append(get_act_quant(act_bit_width, act_quant_type))\n",
@@ -74,8 +108,7 @@
       "                                   out_features=num_classes,\n",
       "                                   per_out_ch_scaling=LAST_FC_PER_OUT_CH_SCALING,\n",
       "                                   bit_width=weight_bit_width,\n",
-      "                                   quant_type=weight_quant_type,\n",
-      "                                   stats_op=stats_op))\n",
+      "                                   quant_type=weight_quant_type))\n",
       "        self.features.append(BatchNorm1d(num_features=num_classes))\n",
       "\n",
       "        for m in self.modules():\n",
@@ -89,17 +122,31 @@
       "    \n",
       "    def forward(self, x):\n",
       "        x = x.view(x.shape[0], -1)\n",
-      "        x = 2.0 * x - torch.tensor([1.0]).to(self.device)\n",
+      "        x = 2.0 * x - torch.tensor([1.0], device=x.device)\n",
       "        for mod in self.features:\n",
       "            x = mod(x)\n",
       "        return x\n",
+      "\n",
+      "\n",
+      "def lfc(cfg):\n",
+      "    weight_bit_width = cfg.getint('QUANT', 'WEIGHT_BIT_WIDTH')\n",
+      "    act_bit_width = cfg.getint('QUANT', 'ACT_BIT_WIDTH')\n",
+      "    in_bit_width = cfg.getint('QUANT', 'IN_BIT_WIDTH')\n",
+      "    num_classes = cfg.getint('MODEL', 'NUM_CLASSES')\n",
+      "    in_channels = cfg.getint('MODEL', 'IN_CHANNELS')\n",
+      "    net = LFC(weight_bit_width=weight_bit_width,\n",
+      "              act_bit_width=act_bit_width,\n",
+      "              in_bit_width=in_bit_width,\n",
+      "              num_classes=num_classes,\n",
+      "              in_ch=in_channels)\n",
+      "    return net\n",
       "\n"
      ]
     }
    ],
    "source": [
-    "from models.LFC import LFC\n",
-    "showSrc(LFC)"
+    "from brevitas_examples import bnn_pynq\n",
+    "showSrc(bnn_pynq.models.LFC)"
    ]
   },
   {
@@ -111,7 +158,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [
     {
@@ -267,18 +314,14 @@
        ")"
       ]
      },
-     "execution_count": 3,
+     "execution_count": 8,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "import torch\n",
-    "\n",
-    "trained_lfc_w1a1_checkpoint = \"/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar\"\n",
-    "lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1).eval()\n",
-    "checkpoint = torch.load(trained_lfc_w1a1_checkpoint, map_location=\"cpu\")\n",
-    "lfc.load_state_dict(checkpoint[\"state_dict\"])\n",
+    "from finn.util.test import get_test_model\n",
+    "lfc = get_test_model(netname = \"LFC\", wbits = 1, abits = 1, pretrained = True)\n",
     "lfc"
    ]
   },
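The updated cell above swaps the hand-rolled checkpoint loading for FINN's get_test_model helper. For reference, here is a hedged sketch of the equivalent manual route under the refactored brevitas_examples.bnn_pynq layout shown earlier in this notebook; the checkpoint path is a placeholder, and the "state_dict" key follows the convention of the old cell this hunk removes.

```python
# Sketch only: manual equivalent of get_test_model under the new Brevitas layout.
# The checkpoint path below is a placeholder, not part of the FINN image.
import torch
from brevitas_examples.bnn_pynq.models.LFC import LFC

lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1).eval()
checkpoint = torch.load("/path/to/LFC_1W1A_best.tar", map_location="cpu")
lfc.load_state_dict(checkpoint["state_dict"])
```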
@@ -291,7 +334,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 12,
    "metadata": {},
    "outputs": [
     {
@@ -308,6 +351,7 @@
     }
    ],
    "source": [
+    "import torch\n",
     "import matplotlib.pyplot as plt\n",
     "from pkgutil import get_data\n",
     "import onnx\n",
@@ -321,7 +365,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 13,
    "metadata": {},
    "outputs": [
     {
@@ -331,7 +375,7 @@
        "        0.0141])"
       ]
      },
-     "execution_count": 6,
+     "execution_count": 13,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -346,7 +390,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 14,
    "metadata": {},
    "outputs": [
     {
@@ -390,15 +434,15 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 15,
    "metadata": {},
    "outputs": [
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "/workspace/brevitas_cnv_lfc/training_scripts/models/LFC.py:85: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect.\n",
-      "  x = 2.0 * x - torch.tensor([1.0]).to(self.device)\n"
+      "/workspace/brevitas/brevitas_examples/bnn_pynq/models/LFC.py:80: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect.\n",
+      "  x = 2.0 * x - torch.tensor([1.0], device=x.device)\n"
      ]
     }
    ],
@@ -418,7 +462,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 9,
+   "execution_count": 16,
    "metadata": {},
    "outputs": [
     {
@@ -442,10 +486,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7f3d330b6ac8>"
+       "<IPython.lib.display.IFrame at 0x7f3a27be9ac8>"
       ]
      },
-     "execution_count": 9,
+     "execution_count": 16,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -472,19 +516,19 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 17,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "input: \"37\"\n",
+       "input: \"40\"\n",
        "input: \"38\"\n",
-       "output: \"40\"\n",
-       "op_type: \"MatMul\""
+       "output: \"41\"\n",
+       "op_type: \"Add\""
       ]
      },
-     "execution_count": 10,
+     "execution_count": 17,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -504,22 +548,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": 18,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "array([[-1., -1., -1., ..., -1., -1.,  1.],\n",
-       "       [-1.,  1., -1., ...,  1., -1., -1.],\n",
-       "       [ 1., -1.,  1., ..., -1., -1., -1.],\n",
-       "       ...,\n",
-       "       [ 1.,  1., -1., ...,  1.,  1.,  1.],\n",
-       "       [-1., -1.,  1., ...,  1.,  1., -1.],\n",
-       "       [ 1.,  1., -1., ...,  1., -1., -1.]], dtype=float32)"
+       "array(-0.5, dtype=float32)"
       ]
      },
-     "execution_count": 11,
+     "execution_count": 18,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -537,16 +575,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 12,
+   "execution_count": 19,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "<DataType.BIPOLAR: 8>"
+       "<DataType.FLOAT32: 16>"
       ]
      },
-     "execution_count": 12,
+     "execution_count": 19,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -557,16 +595,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": 20,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "[784, 1024]"
+       "[]"
       ]
      },
-     "execution_count": 13,
+     "execution_count": 20,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -584,7 +622,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": 21,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -598,7 +636,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 22,
    "metadata": {},
    "outputs": [
     {
@@ -624,10 +662,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7f3d3380aef0>"
+       "<IPython.lib.display.IFrame at 0x7f3a27b49e10>"
       ]
      },
-     "execution_count": 15,
+     "execution_count": 22,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -645,7 +683,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 16,
+   "execution_count": 23,
    "metadata": {},
    "outputs": [
     {
@@ -656,7 +694,7 @@
        "      dtype=float32)"
       ]
      },
-     "execution_count": 16,
+     "execution_count": 23,
      "metadata": {},
      "output_type": "execute_result"
     }
diff --git a/notebooks/end2end_example/StreamingDataflowPartition_1.pdf b/notebooks/end2end_example/StreamingDataflowPartition_1.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..4fedb667e682e682e1cb8c1935d9d4f0db18b251
--- /dev/null
+++ b/notebooks/end2end_example/StreamingDataflowPartition_1.pdf
@@ -0,0 +1,4211 @@
[PDF drawing content elided: this hunk checks in the 4211-line PDF verbatim. Metadata: Title "StreamingDataflowPartition_1_imp", Author maltanar, Creator Nlview 7.0.19, created 2020-09-21. The figure is the schematic of the stitched dataflow partition, showing StreamingDataflowPartition_1_StreamingFIFO_*, StreamingDataWidthConverter_Batch_*, StreamingFCLayer_Batch_* and LabelSelect_Batch_* instances, each with in0_V_V/out_V_V stream ports and ap_clk/ap_rst_n.]
+-130.38 -8.616 Td
+(StreamingDataflowPartition_1_StreamingFIFO_1) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+590 71 10 18 re
+f
+Q
+q
+1 0 0 1 609.5 80.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 595 80 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 617 80 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(in0_V_V) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+760 81 10 18 re
+f
+Q
+q
+1 0 0 1 750.5 90.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 765 90 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 743 90 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-38.36 -3.59 Td
+(out_V_V) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+590 100 m
+600 100 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 602 100 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+600 120 m
+600 118.343 598.657 117 597 117 c
+595.343 117 594 118.343 594 120 c
+594 121.657 595.343 123 597 123 c
+598.657 123 600 121.657 600 120 c
+S
+590 120 m
+594 120 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 602 120 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+5 w
+770 110 m
+760 110 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 758 110 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-43.92 -3.59 Td
+(count[3:0]) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+607 52 m
+753 52 l
+S
+760 59 m
+760.003 58.8889 760.003 58.7778 760 58.6667 c
+759.908 54.8927 756.774 51.908 753 52 c
+S
+760 59 m
+760 141 l
+S
+753 148 m
+753.111 148.003 753.222 148.003 753.333 148 c
+757.107 147.908 760.092 144.774 760 141 c
+S
+753 148 m
+607 148 l
+S
+600 141 m
+599.997 141.111 599.997 141.222 600 141.333 c
+600.092 145.107 603.226 148.092 607 148 c
+S
+600 141 m
+600 59 l
+S
+607 52 m
+606.889 51.9973 606.778 51.9973 606.667 52 c
+602.893 52.092 599.908 55.2261 600 59 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1067 82 m
+1213 82 l
+1213 82 l
+1216 83 l
+1218 84 l
+1219 86 l
+1220 89 l
+1220 89 l
+1220 171 l
+1220 171 l
+1219 174 l
+1218 176 l
+1216 177 l
+1213 178 l
+1213 178 l
+1067 178 l
+1067 178 l
+1064 177 l
+1062 176 l
+1061 174 l
+1060 171 l
+1060 171 l
+1060 89 l
+1060 89 l
+1061 86 l
+1062 84 l
+1064 83 l
+h f
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1140 80 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 2.484 Td
+(StreamingDataflowPartition_1_StreamingFIFO_2) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 1140 180 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 -8.616 Td
+(StreamingDataflowPartition_1_StreamingFIFO_2) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1050 101 10 18 re
+f
+Q
+q
+1 0 0 1 1069.5 110.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 1055 110 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1077 110 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(in0_V_V) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1220 111 10 18 re
+f
+Q
+q
+1 0 0 1 1210.5 120.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 1225 120 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1203 120 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-38.36 -3.59 Td
+(out_V_V) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+1050 130 m
+1060 130 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1062 130 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+1060 150 m
+1060 148.343 1058.66 147 1057 147 c
+1055.34 147 1054 148.343 1054 150 c
+1054 151.657 1055.34 153 1057 153 c
+1058.66 153 1060 151.657 1060 150 c
+S
+1050 150 m
+1054 150 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1062 150 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+5 w
+1230 140 m
+1220 140 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1218 140 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-43.92 -3.59 Td
+(count[5:0]) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+1067 82 m
+1213 82 l
+S
+1220 89 m
+1220 88.8889 1220 88.7778 1220 88.6667 c
+1219.91 84.8927 1216.77 81.908 1213 82 c
+S
+1220 89 m
+1220 171 l
+S
+1213 178 m
+1213.11 178.003 1213.22 178.003 1213.33 178 c
+1217.11 177.908 1220.09 174.774 1220 171 c
+S
+1213 178 m
+1067 178 l
+S
+1060 171 m
+1060 171.111 1060 171.222 1060 171.333 c
+1060.09 175.107 1063.23 178.092 1067 178 c
+S
+1060 171 m
+1060 89 l
+S
+1067 82 m
+1066.89 81.9973 1066.78 81.9973 1066.67 82 c
+1062.89 82.092 1059.91 85.2261 1060 89 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1607 112 m
+1753 112 l
+1753 112 l
+1756 113 l
+1758 114 l
+1759 116 l
+1760 119 l
+1760 119 l
+1760 201 l
+1760 201 l
+1759 204 l
+1758 206 l
+1756 207 l
+1753 208 l
+1753 208 l
+1607 208 l
+1607 208 l
+1604 207 l
+1602 206 l
+1601 204 l
+1600 201 l
+1600 201 l
+1600 119 l
+1600 119 l
+1601 116 l
+1602 114 l
+1604 113 l
+h f
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1680 110 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 2.484 Td
+(StreamingDataflowPartition_1_StreamingFIFO_3) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 1680 210 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 -8.616 Td
+(StreamingDataflowPartition_1_StreamingFIFO_3) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1590 131 10 18 re
+f
+Q
+q
+1 0 0 1 1609.5 140.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 1595 140 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1617 140 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(in0_V_V) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1760 141 10 18 re
+f
+Q
+q
+1 0 0 1 1750.5 150.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 1765 150 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1743 150 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-38.36 -3.59 Td
+(out_V_V) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+1590 160 m
+1600 160 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1602 160 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+1600 180 m
+1600 178.343 1598.66 177 1597 177 c
+1595.34 177 1594 178.343 1594 180 c
+1594 181.657 1595.34 183 1597 183 c
+1598.66 183 1600 181.657 1600 180 c
+S
+1590 180 m
+1594 180 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1602 180 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+5 w
+1770 170 m
+1760 170 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1758 170 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-43.92 -3.59 Td
+(count[5:0]) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+1607 112 m
+1753 112 l
+S
+1760 119 m
+1760 118.889 1760 118.778 1760 118.667 c
+1759.91 114.893 1756.77 111.908 1753 112 c
+S
+1760 119 m
+1760 201 l
+S
+1753 208 m
+1753.11 208.003 1753.22 208.003 1753.33 208 c
+1757.11 207.908 1760.09 204.774 1760 201 c
+S
+1753 208 m
+1607 208 l
+S
+1600 201 m
+1600 201.111 1600 201.222 1600 201.333 c
+1600.09 205.107 1603.23 208.092 1607 208 c
+S
+1600 201 m
+1600 119 l
+S
+1607 112 m
+1606.89 111.997 1606.78 111.997 1606.67 112 c
+1602.89 112.092 1599.91 115.226 1600 119 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+2067 142 m
+2213 142 l
+2213 142 l
+2216 143 l
+2218 144 l
+2219 146 l
+2220 149 l
+2220 149 l
+2220 231 l
+2220 231 l
+2219 234 l
+2218 236 l
+2216 237 l
+2213 238 l
+2213 238 l
+2067 238 l
+2067 238 l
+2064 237 l
+2062 236 l
+2061 234 l
+2060 231 l
+2060 231 l
+2060 149 l
+2060 149 l
+2061 146 l
+2062 144 l
+2064 143 l
+h f
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2140 140 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 2.484 Td
+(StreamingDataflowPartition_1_StreamingFIFO_4) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 2140 240 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 -8.616 Td
+(StreamingDataflowPartition_1_StreamingFIFO_4) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+2050 161 10 18 re
+f
+Q
+q
+1 0 0 1 2069.5 170.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 2055 170 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2077 170 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(in0_V_V) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+2220 171 10 18 re
+f
+Q
+q
+1 0 0 1 2210.5 180.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 2225 180 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2203 180 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-38.36 -3.59 Td
+(out_V_V) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+2050 190 m
+2060 190 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2062 190 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+2060 210 m
+2060 208.343 2058.66 207 2057 207 c
+2055.34 207 2054 208.343 2054 210 c
+2054 211.657 2055.34 213 2057 213 c
+2058.66 213 2060 211.657 2060 210 c
+S
+2050 210 m
+2054 210 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2062 210 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+5 w
+2230 200 m
+2220 200 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2218 200 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-43.92 -3.59 Td
+(count[5:0]) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+2067 142 m
+2213 142 l
+S
+2220 149 m
+2220 148.889 2220 148.778 2220 148.667 c
+2219.91 144.893 2216.77 141.908 2213 142 c
+S
+2220 149 m
+2220 231 l
+S
+2213 238 m
+2213.11 238.003 2213.22 238.003 2213.33 238 c
+2217.11 237.908 2220.09 234.774 2220 231 c
+S
+2213 238 m
+2067 238 l
+S
+2060 231 m
+2060 231.111 2060 231.222 2060 231.333 c
+2060.09 235.107 2063.23 238.092 2067 238 c
+S
+2060 231 m
+2060 149 l
+S
+2067 142 m
+2066.89 141.997 2066.78 141.997 2066.67 142 c
+2062.89 142.092 2059.91 145.226 2060 149 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+2537 132 m
+2683 132 l
+2683 132 l
+2686 133 l
+2688 134 l
+2689 136 l
+2690 139 l
+2690 139 l
+2690 221 l
+2690 221 l
+2689 224 l
+2688 226 l
+2686 227 l
+2683 228 l
+2683 228 l
+2537 228 l
+2537 228 l
+2534 227 l
+2532 226 l
+2531 224 l
+2530 221 l
+2530 221 l
+2530 139 l
+2530 139 l
+2531 136 l
+2532 134 l
+2534 133 l
+h f
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2610 130 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 2.484 Td
+(StreamingDataflowPartition_1_StreamingFIFO_5) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 2610 230 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 -8.616 Td
+(StreamingDataflowPartition_1_StreamingFIFO_5) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+2520 151 10 18 re
+f
+Q
+q
+1 0 0 1 2539.5 160.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 2525 160 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2547 160 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(in0_V_V) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+2690 161 10 18 re
+f
+Q
+q
+1 0 0 1 2680.5 170.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 2695 170 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2673 170 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-38.36 -3.59 Td
+(out_V_V) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+2520 180 m
+2530 180 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2532 180 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+2530 200 m
+2530 198.343 2528.66 197 2527 197 c
+2525.34 197 2524 198.343 2524 200 c
+2524 201.657 2525.34 203 2527 203 c
+2528.66 203 2530 201.657 2530 200 c
+S
+2520 200 m
+2524 200 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2532 200 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+5 w
+2700 190 m
+2690 190 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2688 190 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-43.92 -3.59 Td
+(count[5:0]) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+2537 132 m
+2683 132 l
+S
+2690 139 m
+2690 138.889 2690 138.778 2690 138.667 c
+2689.91 134.893 2686.77 131.908 2683 132 c
+S
+2690 139 m
+2690 221 l
+S
+2683 228 m
+2683.11 228.003 2683.22 228.003 2683.33 228 c
+2687.11 227.908 2690.09 224.774 2690 221 c
+S
+2683 228 m
+2537 228 l
+S
+2530 221 m
+2530 221.111 2530 221.222 2530 221.333 c
+2530.09 225.107 2533.23 228.092 2537 228 c
+S
+2530 221 m
+2530 139 l
+S
+2537 132 m
+2536.89 131.997 2536.78 131.997 2536.67 132 c
+2532.89 132.092 2529.91 135.226 2530 139 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+2997 162 m
+3143 162 l
+3143 162 l
+3146 163 l
+3148 164 l
+3149 166 l
+3150 169 l
+3150 169 l
+3150 251 l
+3150 251 l
+3149 254 l
+3148 256 l
+3146 257 l
+3143 258 l
+3143 258 l
+2997 258 l
+2997 258 l
+2994 257 l
+2992 256 l
+2991 254 l
+2990 251 l
+2990 251 l
+2990 169 l
+2990 169 l
+2991 166 l
+2992 164 l
+2994 163 l
+h f
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3070 160 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 2.484 Td
+(StreamingDataflowPartition_1_StreamingFIFO_6) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 3070 260 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 -8.616 Td
+(StreamingDataflowPartition_1_StreamingFIFO_6) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+2980 181 10 18 re
+f
+Q
+q
+1 0 0 1 2999.5 190.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 2985 190 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3007 190 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(in0_V_V) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+3150 191 10 18 re
+f
+Q
+q
+1 0 0 1 3140.5 200.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 3155 200 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3133 200 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-38.36 -3.59 Td
+(out_V_V) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+2980 210 m
+2990 210 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2992 210 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+2990 230 m
+2990 228.343 2988.66 227 2987 227 c
+2985.34 227 2984 228.343 2984 230 c
+2984 231.657 2985.34 233 2987 233 c
+2988.66 233 2990 231.657 2990 230 c
+S
+2980 230 m
+2984 230 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2992 230 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+5 w
+3160 220 m
+3150 220 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3148 220 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-43.92 -3.59 Td
+(count[3:0]) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+2997 162 m
+3143 162 l
+S
+3150 169 m
+3150 168.889 3150 168.778 3150 168.667 c
+3149.91 164.893 3146.77 161.908 3143 162 c
+S
+3150 169 m
+3150 251 l
+S
+3143 258 m
+3143.11 258.003 3143.22 258.003 3143.33 258 c
+3147.11 257.908 3150.09 254.774 3150 251 c
+S
+3143 258 m
+2997 258 l
+S
+2990 251 m
+2990 251.111 2990 251.222 2990 251.333 c
+2990.09 255.107 2993.23 258.092 2997 258 c
+S
+2990 251 m
+2990 169 l
+S
+2997 162 m
+2996.89 161.997 2996.78 161.997 2996.67 162 c
+2992.89 162.092 2989.91 165.226 2990 169 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+3847 212 m
+3993 212 l
+3993 212 l
+3996 213 l
+3998 214 l
+3999 216 l
+4000 219 l
+4000 219 l
+4000 301 l
+4000 301 l
+3999 304 l
+3998 306 l
+3996 307 l
+3993 308 l
+3993 308 l
+3847 308 l
+3847 308 l
+3844 307 l
+3842 306 l
+3841 304 l
+3840 301 l
+3840 301 l
+3840 219 l
+3840 219 l
+3841 216 l
+3842 214 l
+3844 213 l
+h f
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3920 210 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 2.484 Td
+(StreamingDataflowPartition_1_StreamingFIFO_7) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 3920 310 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-130.38 -8.616 Td
+(StreamingDataflowPartition_1_StreamingFIFO_7) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+3830 231 10 18 re
+f
+Q
+q
+1 0 0 1 3849.5 240.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 3835 240 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3857 240 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(in0_V_V) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+4000 241 10 18 re
+f
+Q
+q
+1 0 0 1 3990.5 250.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 4005 250 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3983 250 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-38.36 -3.59 Td
+(out_V_V) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+3830 260 m
+3840 260 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3842 260 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+3840 280 m
+3840 278.343 3838.66 277 3837 277 c
+3835.34 277 3834 278.343 3834 280 c
+3834 281.657 3835.34 283 3837 283 c
+3838.66 283 3840 281.657 3840 280 c
+S
+3830 280 m
+3834 280 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3842 280 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+5 w
+4010 270 m
+4000 270 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 3998 270 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-43.92 -3.59 Td
+(count[0:0]) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+3847 212 m
+3993 212 l
+S
+4000 219 m
+4000 218.889 4000 218.778 4000 218.667 c
+3999.91 214.893 3996.77 211.908 3993 212 c
+S
+4000 219 m
+4000 301 l
+S
+3993 308 m
+3993.11 308.003 3993.22 308.003 3993.33 308 c
+3997.11 307.908 4000.09 304.774 4000 301 c
+S
+3993 308 m
+3847 308 l
+S
+3840 301 m
+3840 301.111 3840 301.222 3840 301.333 c
+3840.09 305.107 3843.23 308.092 3847 308 c
+S
+3840 301 m
+3840 219 l
+S
+3847 212 m
+3846.89 211.997 3846.78 211.997 3846.67 212 c
+3842.89 212.092 3839.91 215.226 3840 219 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+297 32 m
+523 32 l
+523 32 l
+526 33 l
+528 34 l
+529 36 l
+530 39 l
+530 39 l
+530 121 l
+530 121 l
+529 124 l
+528 126 l
+526 127 l
+523 128 l
+523 128 l
+297 128 l
+297 128 l
+294 127 l
+292 126 l
+291 124 l
+290 121 l
+290 121 l
+290 39 l
+290 39 l
+291 36 l
+292 34 l
+294 33 l
+h f
+Q
+q
+1 0 0 1 410 80 cm
+1 0 0 1 -32 -32 cm
+64 0 0 -64 0 64 cm /Im7 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 410 30 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-142.728 2.484 Td
+(StreamingDataflowPartition_1_Thresholding_Batch_0) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 410 130 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-184.08 -8.616 Td
+(Streamingdataflowpartition_1_thresholding_batch_0 \(Pre-Production\)) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+280 51 10 18 re
+f
+Q
+q
+1 0 0 1 299.5 60.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 285 60 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 307 60 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(in0_V_V) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+530 71 10 18 re
+f
+Q
+q
+1 0 0 1 520.5 80.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im1 Do
+Q
+q
+1 0 0 1 535 80 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 513 80 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-38.36 -3.59 Td
+(out_V_V) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+280 80 m
+290 80 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 292 80 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+290 100 m
+290 98.3431 288.657 97 287 97 c
+285.343 97 284 98.3431 284 100 c
+284 101.657 285.343 103 287 103 c
+288.657 103 290 101.657 290 100 c
+S
+280 100 m
+284 100 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 292 100 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+297 32 m
+523 32 l
+S
+530 39 m
+530.003 38.8889 530.003 38.7778 530 38.6667 c
+529.908 34.8927 526.774 31.908 523 32 c
+S
+530 39 m
+530 121 l
+S
+523 128 m
+523.111 128.003 523.222 128.003 523.333 128 c
+527.107 127.908 530.092 124.774 530 121 c
+S
+523 128 m
+297 128 l
+S
+290 121 m
+289.997 121.111 289.997 121.222 290 121.333 c
+290.092 125.107 293.226 128.092 297 128 c
+S
+290 121 m
+290 39 l
+S
+297 32 m
+296.889 31.9973 296.778 31.9973 296.667 32 c
+292.893 32.092 289.908 35.2261 290 39 c
+S
+q
+0.867 0.831 0.816 rg
+/GSa0 gs
+0 70 m
+-7 77 l
+-21 77 l
+-21 63 l
+-7 63 l
+h f
+Q
+0.063 0.133 0.208 RG
+0 70 m
+-7 77 l
+-21 77 l
+-21 63 l
+-7 63 l
+h S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 -25 70 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-34.68 -4.308 Td
+(ap_clk) Tj
+ET
+Q
+q
+0.867 0.831 0.816 rg
+/GSa0 gs
+0 90 m
+-7 97 l
+-21 97 l
+-21 83 l
+-7 83 l
+h f
+Q
+0 90 m
+-7 97 l
+-21 97 l
+-21 83 l
+-7 83 l
+h S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 -25 90 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-46.692 -4.308 Td
+(ap_rst_n) Tj
+ET
+Q
+q
+0.867 0.831 0.816 rg
+/GSa0 gs
+4050 250 m
+4050 243 l
+4064 243 l
+4071 250 l
+4064 257 l
+4050 257 l
+h f
+Q
+3 w
+0.165 0.369 0.435 RG
+4050 250 m
+4050 243 l
+4064 243 l
+4071 250 l
+4064 257 l
+4050 257 l
+h S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 4075 250 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+0 -4.308 Td
+(m_axis_0) Tj
+ET
+Q
+q
+0.867 0.831 0.816 rg
+/GSa0 gs
+0 50 m
+-7 57 l
+-21 57 l
+-21 43 l
+-7 43 l
+h f
+Q
+0 50 m
+-7 57 l
+-21 57 l
+-21 43 l
+-7 43 l
+h S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 -25 50 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-47.352 -4.308 Td
+(s_axis_0) Tj
+ET
+Q
+4 w
+0.255 0.380 0.624 RG
+3780 240 m
+3830 240 l
+S
+1540 140 m
+1590 140 l
+S
+3470 220 m
+3520 220 l
+S
+1000 110 m
+1050 110 l
+S
+2000 170 m
+2050 170 l
+S
+2460 200 m
+2480 200 l
+2480 160 l
+2520 160 l
+S
+2930 190 m
+2980 190 l
+S
+230 60 m
+280 60 l
+S
+770 90 m
+820 90 l
+S
+1230 120 m
+1280 120 l
+S
+1770 150 m
+1820 150 l
+S
+2230 180 m
+2280 180 l
+S
+2700 170 m
+2750 170 l
+S
+3160 200 m
+3210 200 l
+S
+4010 250 m
+4050 250 l
+S
+540 80 m
+590 80 l
+S
+1 w
+0.063 0.133 0.208 RG
+0 70 m
+30 70 l
+30 150 l
+260 150 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+258 148 5 5 re
+f
+Q
+590 100 m
+560 100 l
+560 180 l
+790 180 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+788 178 5 5 re
+f
+Q
+1050 130 m
+1020 130 l
+1020 210 l
+1260 210 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+1258 208 5 5 re
+f
+Q
+1590 160 m
+1560 160 l
+1560 240 l
+1790 240 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+1788 238 5 5 re
+f
+Q
+2050 190 m
+2020 190 l
+2020 270 l
+2250 270 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2248 268 5 5 re
+f
+Q
+30 70 m
+50 70 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+28 68 5 5 re
+f
+Q
+280 80 m
+260 80 l
+260 160 l
+560 160 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+558 158 5 5 re
+f
+Q
+820 110 m
+790 110 l
+790 180 l
+1020 180 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+1018 178 5 5 re
+f
+Q
+1280 140 m
+1260 140 l
+1260 220 l
+1560 220 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+1558 218 5 5 re
+f
+Q
+1820 170 m
+1790 170 l
+1790 240 l
+2020 240 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2018 238 5 5 re
+f
+Q
+2280 200 m
+2250 200 l
+2250 270 l
+2490 270 l
+2490 180 l
+2520 180 l
+S
+2490 260 m
+2720 260 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2718 258 5 5 re
+f
+Q
+2980 210 m
+2950 210 l
+2950 290 l
+3190 290 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+3188 288 5 5 re
+f
+Q
+3520 240 m
+3490 240 l
+3490 320 l
+3800 320 l
+3800 260 l
+3830 260 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2488 258 5 5 re
+f
+Q
+2750 190 m
+2720 190 l
+2720 260 l
+2950 260 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2948 258 5 5 re
+f
+Q
+3210 220 m
+3190 220 l
+3190 300 l
+3490 300 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+3488 298 5 5 re
+f
+Q
+0 90 m
+20 90 l
+20 160 l
+250 160 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+248 158 5 5 re
+f
+Q
+590 120 m
+570 120 l
+570 190 l
+800 190 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+798 188 5 5 re
+f
+Q
+1050 150 m
+1030 150 l
+1030 220 l
+1250 220 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+1248 218 5 5 re
+f
+Q
+1590 180 m
+1570 180 l
+1570 250 l
+1800 250 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+1798 248 5 5 re
+f
+Q
+2050 210 m
+2030 210 l
+2030 280 l
+2260 280 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2258 278 5 5 re
+f
+Q
+20 90 m
+50 90 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+18 88 5 5 re
+f
+Q
+280 100 m
+250 100 l
+250 170 l
+570 170 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+568 168 5 5 re
+f
+Q
+820 130 m
+800 130 l
+800 190 l
+1030 190 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+1028 188 5 5 re
+f
+Q
+1280 160 m
+1250 160 l
+1250 230 l
+1570 230 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+1568 228 5 5 re
+f
+Q
+1820 190 m
+1800 190 l
+1800 250 l
+2030 250 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2028 248 5 5 re
+f
+Q
+2280 220 m
+2260 220 l
+2260 280 l
+2500 280 l
+2500 200 l
+2520 200 l
+S
+2500 270 m
+2730 270 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2728 268 5 5 re
+f
+Q
+2980 230 m
+2960 230 l
+2960 300 l
+3180 300 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+3178 298 5 5 re
+f
+Q
+3520 260 m
+3500 260 l
+3500 330 l
+3810 330 l
+3810 280 l
+3830 280 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2498 268 5 5 re
+f
+Q
+2750 210 m
+2730 210 l
+2730 270 l
+2960 270 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+2958 268 5 5 re
+f
+Q
+3210 240 m
+3180 240 l
+3180 310 l
+3500 310 l
+S
+q
+0.063 0.133 0.208 rg
+/GSa0 gs
+3498 308 5 5 re
+f
+Q
+4 w
+0.255 0.380 0.624 RG
+0 50 m
+50 50 l
+S
+endstream
+endobj
+3 0 obj
+<<
+  /Type    /Pages
+  /Kids
+  [
+  10 0 R
+  ]
+  /Count   1
+  /ProcSet [ /PDF /Text /ImageB /ImageC ]
+>>
+endobj
+6 0 obj
+<<
+  /GS << /Type /ExtGState
+         /LC    0
+         /LJ    0
+         /ML    4.0
+         /ca    1.0
+         /CA    1.0
+         /AIS   false
+         /SMask /None
+  >>
+  /GSa0 << /Type /ExtGState /ca 1 >>
+  /GSA0 << /Type /ExtGState /CA 1 >>
+>>
+endobj
+22 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_LabelSelect_Batch_0 StreamingDataflowPartition_1_StreamingDataflowPartition_1_LabelSelect_Batch_0_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 493 378 539 394]
+  /Parent 21 0 R
+  /Next   23 0 R
+>>
+endobj
+23 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingDataWidthConverter_Batch_0 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingDataWidthConverter_Batch_0_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 196 391 255 407]
+  /Parent 21 0 R
+  /Prev   22 0 R
+  /Next   24 0 R
+>>
+endobj
+24 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingDataWidthConverter_Batch_1 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingDataWidthConverter_Batch_1_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 446 380 505 397]
+  /Parent 21 0 R
+  /Prev   23 0 R
+  /Next   25 0 R
+>>
+endobj
+25 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFCLayer_Batch_0 StreamingDataflowPartition_1_StreamingFCLayer_Batch_0_imp)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 140 396 181 411]
+  /Parent 21 0 R
+  /Prev   24 0 R
+  /Next   26 0 R
+>>
+endobj
+26 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFCLayer_Batch_1 StreamingDataflowPartition_1_StreamingFCLayer_Batch_1_imp)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 270 388 311 403]
+  /Parent 21 0 R
+  /Prev   25 0 R
+  /Next   27 0 R
+>>
+endobj
+27 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFCLayer_Batch_2 StreamingDataflowPartition_1_StreamingFCLayer_Batch_2_imp)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 329 385 370 399]
+  /Parent 21 0 R
+  /Prev   26 0 R
+  /Next   28 0 R
+>>
+endobj
+28 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFCLayer_Batch_3 StreamingDataflowPartition_1_StreamingFCLayer_Batch_3_imp)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 390 386 431 400]
+  /Parent 21 0 R
+  /Prev   27 0 R
+  /Next   29 0 R
+>>
+endobj
+29 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_0 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingFIFO_0_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 44 400 77 416]
+  /Parent 21 0 R
+  /Prev   28 0 R
+  /Next   30 0 R
+>>
+endobj
+30 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_1 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingFIFO_1_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 114 396 148 412]
+  /Parent 21 0 R
+  /Prev   29 0 R
+  /Next   31 0 R
+>>
+endobj
+31 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_2 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingFIFO_2_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 173 392 207 408]
+  /Parent 21 0 R
+  /Prev   30 0 R
+  /Next   32 0 R
+>>
+endobj
+32 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_3 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingFIFO_3_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 243 388 277 404]
+  /Parent 21 0 R
+  /Prev   31 0 R
+  /Next   33 0 R
+>>
+endobj
+33 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_4 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingFIFO_4_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 303 384 337 400]
+  /Parent 21 0 R
+  /Prev   32 0 R
+  /Next   34 0 R
+>>
+endobj
+34 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_5 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingFIFO_5_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 364 385 398 402]
+  /Parent 21 0 R
+  /Prev   33 0 R
+  /Next   35 0 R
+>>
+endobj
+35 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_6 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingFIFO_6_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 424 381 458 398]
+  /Parent 21 0 R
+  /Prev   34 0 R
+  /Next   36 0 R
+>>
+endobj
+36 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_7 StreamingDataflowPartition_1_StreamingDataflowPartition_1_StreamingFIFO_7_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 534 375 568 391]
+  /Parent 21 0 R
+  /Prev   35 0 R
+  /Next   37 0 R
+>>
+endobj
+37 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_Thresholding_Batch_0 StreamingDataflowPartition_1_StreamingDataflowPartition_1_Thresholding_Batch_0_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 72 398 119 415]
+  /Parent 21 0 R
+  /Prev   36 0 R
+>>
+endobj
+21 0 obj
+<<
+  /Title  (instances)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 20 0 R
+  /First  22 0 R
+  /Last   37 0 R
+  /Count  16
+  /Next   38 0 R
+>>
+endobj
+39 0 obj
+<<
+  /Title  (ap_clk input)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 35 407 43 409]
+  /Parent 38 0 R
+  /Next   40 0 R
+>>
+endobj
+40 0 obj
+<<
+  /Title  (ap_rst_n input)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 33 404 43 406]
+  /Parent 38 0 R
+  /Prev   39 0 R
+  /Next   41 0 R
+>>
+endobj
+41 0 obj
+<<
+  /Title  (m_axis_0 output)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 567 383 578 386]
+  /Parent 38 0 R
+  /Prev   40 0 R
+  /Next   42 0 R
+>>
+endobj
+42 0 obj
+<<
+  /Title  (s_axis_0 input)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 33 409 43 412]
+  /Parent 38 0 R
+  /Prev   41 0 R
+>>
+endobj
+38 0 obj
+<<
+  /Title  (ports)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 20 0 R
+  /First  39 0 R
+  /Last   42 0 R
+  /Count  4
+  /Prev   21 0 R
+  /Next   43 0 R
+>>
+endobj
+43 0 obj
+<<
+  /Title  (portBuses)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 20 0 R
+  /First  0 0 R
+  /Last   0 0 R
+  /Count  0
+  /Prev   38 0 R
+  /Next   44 0 R
+>>
+endobj
+45 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_LabelSelect_Batch_0_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 533 386 539 386]
+  /Parent 44 0 R
+  /Next   46 0 R
+>>
+endobj
+46 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingDataWidthConverter_Batch_0_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 242 399 249 399]
+  /Parent 44 0 R
+  /Prev   45 0 R
+  /Next   47 0 R
+>>
+endobj
+47 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingDataWidthConverter_Batch_1_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 493 388 499 388]
+  /Parent 44 0 R
+  /Prev   46 0 R
+  /Next   48 0 R
+>>
+endobj
+48 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFCLayer_Batch_0_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 172 403 179 403]
+  /Parent 44 0 R
+  /Prev   47 0 R
+  /Next   49 0 R
+>>
+endobj
+49 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFCLayer_Batch_1_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 302 395 308 395]
+  /Parent 44 0 R
+  /Prev   48 0 R
+  /Next   50 0 R
+>>
+endobj
+50 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFCLayer_Batch_2_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 361 390 370 397]
+  /Parent 44 0 R
+  /Prev   49 0 R
+  /Next   51 0 R
+>>
+endobj
+51 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFCLayer_Batch_3_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 423 392 429 392]
+  /Parent 44 0 R
+  /Prev   50 0 R
+  /Next   52 0 R
+>>
+endobj
+52 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_0_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 72 409 79 409]
+  /Parent 44 0 R
+  /Prev   51 0 R
+  /Next   53 0 R
+>>
+endobj
+53 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_1_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 142 405 149 405]
+  /Parent 44 0 R
+  /Prev   52 0 R
+  /Next   54 0 R
+>>
+endobj
+54 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_2_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 202 401 208 401]
+  /Parent 44 0 R
+  /Prev   53 0 R
+  /Next   55 0 R
+>>
+endobj
+55 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_3_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 272 397 279 397]
+  /Parent 44 0 R
+  /Prev   54 0 R
+  /Next   56 0 R
+>>
+endobj
+56 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_4_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 332 394 338 394]
+  /Parent 44 0 R
+  /Prev   55 0 R
+  /Next   57 0 R
+>>
+endobj
+57 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_5_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 393 395 399 395]
+  /Parent 44 0 R
+  /Prev   56 0 R
+  /Next   58 0 R
+>>
+endobj
+58 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_6_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 452 391 459 391]
+  /Parent 44 0 R
+  /Prev   57 0 R
+  /Next   59 0 R
+>>
+endobj
+59 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_StreamingFIFO_7_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 563 384 568 384]
+  /Parent 44 0 R
+  /Prev   58 0 R
+  /Next   60 0 R
+>>
+endobj
+60 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_Thresholding_Batch_0_out_V_V)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 112 407 119 407]
+  /Parent 44 0 R
+  /Prev   59 0 R
+  /Next   61 0 R
+>>
+endobj
+61 0 obj
+<<
+  /Title  (ap_clk_0_1)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 42 375 539 408]
+  /Parent 44 0 R
+  /Prev   60 0 R
+  /Next   62 0 R
+>>
+endobj
+62 0 obj
+<<
+  /Title  (ap_rst_n_0_1)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 42 374 539 405]
+  /Parent 44 0 R
+  /Prev   61 0 R
+  /Next   63 0 R
+>>
+endobj
+63 0 obj
+<<
+  /Title  (in0_V_V_0_1)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 42 410 49 410]
+  /Parent 44 0 R
+  /Prev   62 0 R
+>>
+endobj
+44 0 obj
+<<
+  /Title  (nets)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 20 0 R
+  /First  45 0 R
+  /Last   63 0 R
+  /Count  19
+  /Prev   43 0 R
+  /Next   64 0 R
+>>
+endobj
+64 0 obj
+<<
+  /Title  (netBundles)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 20 0 R
+  /First  0 0 R
+  /Last   0 0 R
+  /Count  0
+  /Prev   44 0 R
+>>
+endobj
+20 0 obj
+<<
+  /Title  (Nlview page 1)
+  /C      [0.4 0.0 0.0]
+  /Dest   [10 0 R /Fit]
+  /Parent 7 0 R
+  /First  21 0 R
+  /Last   64 0 R
+  /Count  5
+>>
+endobj
+8 0 obj
+<<
+>>
+endobj
+9 0 obj
+<<
+  /Im0 12 0 R
+  /Im1 13 0 R
+  /Im2 14 0 R
+  /Im3 15 0 R
+  /Im4 16 0 R
+  /Im5 17 0 R
+  /Im6 18 0 R
+  /Im7 19 0 R
+>>
+endobj
+12 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 64
+  /Height 64
+  /Length 12288
+>>
+stream
+endstream
+endobj
+65 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 16
+  /Height 16
+  /Length 256
+>>
+stream
+endstream
+endobj
+13 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 16
+  /Height 16
+  /SMask 65 0 R
+  /Length 768
+>>
+stream
+endstream
+endobj
+66 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+endstream
+endobj
+14 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 66 0 R
+  /Length 540
+>>
+stream
+endstream
+endobj
+67 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+endstream
+endobj
+15 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 67 0 R
+  /Length 540
+>>
+stream
+endstream
+endobj
+16 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 64
+  /Height 64
+  /Length 12288
+>>
+stream
+[binary image stream data omitted]endstream
+endobj
+17 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 64
+  /Height 64
+  /Length 12288
+>>
+stream
+[binary image stream data omitted]endstream
+endobj
+68 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 44
+  /Height 44
+  /Length 1936
+>>
+stream
+[binary image stream data omitted]endstream
+endobj
+18 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 44
+  /Height 44
+  /SMask 68 0 R
+  /Length 5808
+>>
+stream
+[binary image stream data omitted]endstream
+endobj
+19 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 64
+  /Height 64
+  /Length 12288
+>>
+stream
+[binary image stream data omitted]endstream
+endobj
+7 0 obj
+<<
+  /Type  /Outline
+  /First 20 0 R
+  /Last  20 0 R
+  /Count 1
+>>
+endobj
+xref
+0 69
+0000000000 65535 f 
+0000000009 00000 n 
+0000000238 00000 n 
+0000041558 00000 n 
+0000000388 00000 n 
+0000000515 00000 n 
+0000041677 00000 n 
+0000113213 00000 n 
+0000051701 00000 n 
+0000051722 00000 n 
+0000000725 00000 n 
+0000000866 00000 n 
+0000051855 00000 n 
+0000064722 00000 n 
+0000066007 00000 n 
+0000067064 00000 n 
+0000067780 00000 n 
+0000080230 00000 n 
+0000094778 00000 n 
+0000100763 00000 n 
+0000051543 00000 n 
+0000046304 00000 n 
+0000041943 00000 n 
+0000042205 00000 n 
+0000042516 00000 n 
+0000042827 00000 n 
+0000043089 00000 n 
+0000043351 00000 n 
+0000043613 00000 n 
+0000043875 00000 n 
+0000044144 00000 n 
+0000044415 00000 n 
+0000044686 00000 n 
+0000044957 00000 n 
+0000045228 00000 n 
+0000045499 00000 n 
+0000045770 00000 n 
+0000046041 00000 n 
+0000047084 00000 n 
+0000046465 00000 n 
+0000046609 00000 n 
+0000046772 00000 n 
+0000046938 00000 n 
+0000047257 00000 n 
+0000051211 00000 n 
+0000047432 00000 n 
+0000047622 00000 n 
+0000047845 00000 n 
+0000048068 00000 n 
+0000048280 00000 n 
+0000048492 00000 n 
+0000048704 00000 n 
+0000048916 00000 n 
+0000049117 00000 n 
+0000049320 00000 n 
+0000049523 00000 n 
+0000049726 00000 n 
+0000049929 00000 n 
+0000050132 00000 n 
+0000050335 00000 n 
+0000050538 00000 n 
+0000050746 00000 n 
+0000050906 00000 n 
+0000051068 00000 n 
+0000051384 00000 n 
+0000064305 00000 n 
+0000065666 00000 n 
+0000066723 00000 n 
+0000092680 00000 n 
+trailer
+<<
+  /Size 69
+  /Info 1 0 R
+  /Root 2 0 R
+>>
+startxref
+113295
+%%EOF
diff --git a/notebooks/end2end_example/cnv_end2end_example.ipynb b/notebooks/end2end_example/cnv_end2end_example.ipynb
index adb34f6d12ab9177490c07d67fbabc446eeb46ab..795f7f22fef033381aed00375e6bd1bd45affce8 100644
--- a/notebooks/end2end_example/cnv_end2end_example.ipynb
+++ b/notebooks/end2end_example/cnv_end2end_example.ipynb
@@ -55,7 +55,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 1,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -71,41 +71,31 @@
    "source": [
     "## 1. Brevitas Export, FINN Import and Tidy-Up\n",
     "\n",
-    "Similar to what we did in the TFC-w1a1 end-to-end notebook, we will start by exporting the [pretrained CNV-w1a1 network](https://github.com/maltanar/brevitas_cnv_lfc) to ONNX, importing that into FINN and running the \"tidy-up\" transformations to have a first look at the topology."
+    "Similar to what we did in the TFC-w1a1 end-to-end notebook, we will start by exporting the [pretrained CNV-w1a1 network](https://github.com/Xilinx/brevitas/tree/master/brevitas_examples/bnn_pynq) to ONNX, importing that into FINN and running the \"tidy-up\" transformations to have a first look at the topology."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 7,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/workspace/brevitas_cnv_lfc/training_scripts/models/CNV.py:112: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect.\n",
-      "  x = 2.0 * x - torch.tensor([1.0]).to(self.device)\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "import onnx\n",
     "from finn.util.test import get_test_model_trained\n",
     "import brevitas.onnx as bo\n",
     "from finn.core.modelwrapper import ModelWrapper\n",
-    "from finn.transformation.double_to_single_float import DoubleToSingleFloat\n",
     "from finn.transformation.infer_shapes import InferShapes\n",
     "from finn.transformation.fold_constants import FoldConstants\n",
-    "from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames\n",
+    "from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames, RemoveStaticGraphInputs\n",
     "\n",
     "cnv = get_test_model_trained(\"CNV\", 1, 1)\n",
     "bo.export_finn_onnx(cnv, (1, 3, 32, 32), build_dir + \"/end2end_cnv_w1a1_export.onnx\")\n",
     "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_export.onnx\")\n",
-    "model = model.transform(DoubleToSingleFloat())\n",
     "model = model.transform(InferShapes())\n",
     "model = model.transform(FoldConstants())\n",
     "model = model.transform(GiveUniqueNodeNames())\n",
     "model = model.transform(GiveReadableTensorNames())\n",
+    "model = model.transform(RemoveStaticGraphInputs())\n",
     "model.save(build_dir + \"/end2end_cnv_w1a1_tidy.onnx\")"
    ]
   },
@@ -118,7 +108,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 27,
    "metadata": {},
    "outputs": [
     {
@@ -142,10 +132,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7f7b24ef8b00>"
+       "<IPython.lib.display.IFrame at 0x7f25b19194a8>"
       ]
      },
-     "execution_count": 3,
+     "execution_count": 27,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -161,6 +151,105 @@
     "You can see that the network is composed of a repeating convolution-convolution-maxpool layer pattern to extract features using 3x3 convolution kernels (with weights binarized) and `Sign` activations, followed by fully connected layers acting as the classifier. Also notice the initial `MultiThreshold` layer at the beginning of the network, which is quantizing float inputs to 8-bit ones."
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Adding Pre- and Postprocessing <a id='prepost'></a>\n",
+    "\n",
+    "TODO"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/workspace/finn/src/finn/transformation/infer_data_layouts.py:113: UserWarning: Assuming 4D input is NCHW\n",
+      "  warnings.warn(\"Assuming 4D input is NCHW\")\n"
+     ]
+    }
+   ],
+   "source": [
+    "from finn.util.pytorch import ToTensor\n",
+    "from finn.transformation.merge_onnx_models import MergeONNXModels\n",
+    "from finn.core.datatype import DataType\n",
+    "\n",
+    "model = ModelWrapper(build_dir+\"/end2end_cnv_w1a1_tidy.onnx\")\n",
+    "global_inp_name = model.graph.input[0].name\n",
+    "ishape = model.get_tensor_shape(global_inp_name)\n",
+    "# preprocessing: torchvision's ToTensor divides uint8 inputs by 255\n",
+    "totensor_pyt = ToTensor()\n",
+    "chkpt_preproc_name = build_dir+\"/end2end_cnv_w1a1_preproc.onnx\"\n",
+    "bo.export_finn_onnx(totensor_pyt, ishape, chkpt_preproc_name)\n",
+    "\n",
+    "# join preprocessing and core model\n",
+    "pre_model = ModelWrapper(chkpt_preproc_name)\n",
+    "model = model.transform(MergeONNXModels(pre_model))\n",
+    "# add input quantization annotation: UINT8 for all BNN-PYNQ models\n",
+    "global_inp_name = model.graph.input[0].name\n",
+    "model.set_tensor_datatype(global_inp_name, DataType.UINT8)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "Stopping http://0.0.0.0:8081\n",
+      "Serving '/workspace/finn/end2end_cnv_w1a1_pre_post.onnx' at http://0.0.0.0:8081\n"
+     ]
+    },
+    {
+     "data": {
+      "text/html": [
+       "\n",
+       "        <iframe\n",
+       "            width=\"100%\"\n",
+       "            height=\"400\"\n",
+       "            src=\"http://0.0.0.0:8081/\"\n",
+       "            frameborder=\"0\"\n",
+       "            allowfullscreen\n",
+       "        ></iframe>\n",
+       "        "
+      ],
+      "text/plain": [
+       "<IPython.lib.display.IFrame at 0x7f25b1919518>"
+      ]
+     },
+     "execution_count": 29,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from finn.transformation.insert_topk import InsertTopK\n",
+    "from finn.transformation.infer_datatypes import InferDataTypes\n",
+    "\n",
+    "# postprocessing: insert Top-1 node at the end\n",
+    "model = model.transform(InsertTopK(k=1))\n",
+    "chkpt_name = build_dir+\"/end2end_cnv_w1a1_pre_post.onnx\"\n",
+    "# tidy-up again\n",
+    "model = model.transform(InferShapes())\n",
+    "model = model.transform(FoldConstants())\n",
+    "model = model.transform(GiveUniqueNodeNames())\n",
+    "model = model.transform(GiveReadableTensorNames())\n",
+    "model = model.transform(InferDataTypes())\n",
+    "model = model.transform(RemoveStaticGraphInputs())\n",
+    "model.save(chkpt_name)\n",
+    "\n",
+    "showInNetron(build_dir+\"/end2end_cnv_w1a1_pre_post.onnx\")"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -179,7 +268,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 30,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -187,15 +276,22 @@
     "from finn.transformation.lower_convs_to_matmul import LowerConvsToMatMul\n",
     "from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount\n",
     "import finn.transformation.streamline.absorb as absorb\n",
-    "from finn.transformation.streamline.reorder import MakeMaxPoolNHWC\n",
+    "from finn.transformation.streamline.reorder import MakeMaxPoolNHWC, MoveScalarLinearPastInvariants\n",
+    "from finn.transformation.infer_data_layouts import InferDataLayouts\n",
+    "from finn.transformation.general import RemoveUnusedTensors\n",
     "\n",
-    "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_tidy.onnx\")\n",
+    "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_pre_post.onnx\")\n",
+    "model = model.transform(MoveScalarLinearPastInvariants())\n",
     "model = model.transform(Streamline())\n",
     "model = model.transform(LowerConvsToMatMul())\n",
     "model = model.transform(MakeMaxPoolNHWC())\n",
     "model = model.transform(absorb.AbsorbTransposeIntoMultiThreshold())\n",
     "model = model.transform(ConvertBipolarMatMulToXnorPopcount())\n",
     "model = model.transform(Streamline())\n",
+    "# absorb final add-mul nodes into TopK\n",
+    "model = model.transform(absorb.AbsorbScalarMulAddIntoTopK())\n",
+    "model = model.transform(InferDataLayouts())\n",
+    "model = model.transform(RemoveUnusedTensors())\n",
     "model.save(build_dir + \"/end2end_cnv_w1a1_streamlined.onnx\")"
    ]
   },
@@ -215,7 +311,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 31,
    "metadata": {},
    "outputs": [
     {
@@ -241,10 +337,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7f0cb1098f28>"
+       "<IPython.lib.display.IFrame at 0x7f25b19a9470>"
       ]
      },
-     "execution_count": 5,
+     "execution_count": 31,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -264,7 +360,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 32,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -272,8 +368,9 @@
     "from finn.transformation.fpgadataflow.create_dataflow_partition import (\n",
     "    CreateDataflowPartition,\n",
     ")\n",
-    "from finn.transformation.move_reshape import MoveReshape\n",
+    "from finn.transformation.move_reshape import RemoveCNVtoFCFlatten\n",
     "from finn.custom_op.registry import getCustomOp\n",
+    "from finn.transformation.infer_data_layouts import InferDataLayouts\n",
     "\n",
     "# choose the memory mode for the MVTU units, decoupled or const\n",
     "mem_mode = \"decoupled\"\n",
@@ -281,10 +378,18 @@
     "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_streamlined.onnx\")\n",
     "model = model.transform(to_hls.InferBinaryStreamingFCLayer(mem_mode))\n",
     "model = model.transform(to_hls.InferQuantizedStreamingFCLayer(mem_mode))\n",
+    "# TopK to LabelSelect\n",
+    "model = model.transform(to_hls.InferLabelSelectLayer())\n",
+    "# input quantization (if any) to standalone thresholding\n",
+    "model = model.transform(to_hls.InferThresholdingLayer())\n",
     "model = model.transform(to_hls.InferConvInpGen())\n",
     "model = model.transform(to_hls.InferStreamingMaxPool())\n",
     "# get rid of Reshape(-1, 1) operation between hlslib nodes\n",
-    "model = model.transform(MoveReshape())\n",
+    "model = model.transform(RemoveCNVtoFCFlatten())\n",
+    "# get rid of Tranpose -> Tranpose identity seq\n",
+    "model = model.transform(absorb.AbsorbConsecutiveTransposes())\n",
+    "# infer tensor data layouts\n",
+    "model = model.transform(InferDataLayouts())\n",
     "parent_model = model.transform(CreateDataflowPartition())\n",
     "parent_model.save(build_dir + \"/end2end_cnv_w1a1_dataflow_parent.onnx\")\n",
     "sdp_node = parent_model.get_nodes_by_op_type(\"StreamingDataflowPartition\")[0]\n",
@@ -299,12 +404,61 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Notice the additional `MoveReshape` transformation that was not used for TFC-w1a1. In the last Netron visualization you may have noticed a `Reshape` operation towards the end of the network where the convolutional part of the network ends and the fully-connected layers started. That `Reshape` is essentialy a tensor flattening operation, which we can remove for the purposes of hardware implementation. We can examine the contents of the dataflow partition with Netron, and observe the `ConvolutionInputGenerator`, `StreamingFCLayer_Batch` and `StreamingMaxPool_Batch` nodes that implement the sliding window, matrix multiply and maxpool operations in hlslib. *Note that the StreamingFCLayer instances following the ConvolutionInputGenerator nodes are really implementing the convolutions, despite the name. The final three StreamingFCLayer instances implement actual FC layers.*"
+    "Notice the additional `RemoveCNVtoFCFlatten` transformation that was not used for TFC-w1a1. In the last Netron visualization you may have noticed a `Reshape` operation towards the end of the network where the convolutional part of the network ends and the fully-connected layers started. That `Reshape` is essentialy a tensor flattening operation, which we can remove for the purposes of hardware implementation. We can examine the contents of the dataflow partition with Netron, and observe the `ConvolutionInputGenerator`, `StreamingFCLayer_Batch` and `StreamingMaxPool_Batch` nodes that implement the sliding window, matrix multiply and maxpool operations in hlslib. *Note that the StreamingFCLayer instances following the ConvolutionInputGenerator nodes are really implementing the convolutions, despite the name. The final three StreamingFCLayer instances implement actual FC layers.*"
    ]
   },
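+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a quick programmatic complement to the Netron view, the sketch below (not part of the original flow) tallies the node types inside the child dataflow model. It assumes the child model has been saved as `end2end_cnv_w1a1_dataflow_model.onnx` in `build_dir`, the same filename that is loaded again in the folding step further down.\n",
+    "\n",
+    "```python\n",
+    "# hedged sketch: count the HLS node types inside the child dataflow model\n",
+    "from collections import Counter\n",
+    "\n",
+    "child_model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_dataflow_model.onnx\")\n",
+    "print(Counter(n.op_type for n in child_model.graph.node))\n",
+    "# expect ConvolutionInputGenerator, StreamingFCLayer_Batch and StreamingMaxPool_Batch entries\n",
+    "```"
+   ]
+  },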
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 36,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "Stopping http://0.0.0.0:8081\n",
+      "Serving '/workspace/finn/end2end_cnv_w1a1_dataflow_parent.onnx' at http://0.0.0.0:8081\n"
+     ]
+    },
+    {
+     "data": {
+      "text/html": [
+       "\n",
+       "        <iframe\n",
+       "            width=\"100%\"\n",
+       "            height=\"400\"\n",
+       "            src=\"http://0.0.0.0:8081/\"\n",
+       "            frameborder=\"0\"\n",
+       "            allowfullscreen\n",
+       "        ></iframe>\n",
+       "        "
+      ],
+      "text/plain": [
+       "<IPython.lib.display.IFrame at 0x7f25b18b7668>"
+      ]
+     },
+     "execution_count": 36,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "showInNetron(build_dir + \"/end2end_cnv_w1a1_dataflow_parent.onnx\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Note that pretty much everything has gone into the `StreamingDataflowPartition` node; the only operation remaining is to apply a `Transpose` to obtain NHWC input from a NCHW input (the ONNX default). "
+   ]
+  },
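+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The sketch below is an optional check (not part of the generated flow) that confirms this programmatically: listing the op types left in the parent graph should show only the `Transpose` and the `StreamingDataflowPartition` node.\n",
+    "\n",
+    "```python\n",
+    "# hedged sketch: list the ops left outside the dataflow partition\n",
+    "parent_model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_dataflow_parent.onnx\")\n",
+    "print([n.op_type for n in parent_model.graph.node])\n",
+    "```"
+   ]
+  },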
+  {
+   "cell_type": "code",
+   "execution_count": 33,
    "metadata": {},
    "outputs": [
     {
@@ -330,10 +484,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7f0cb063e208>"
+       "<IPython.lib.display.IFrame at 0x7f25b18fe860>"
       ]
      },
-     "execution_count": 7,
+     "execution_count": 33,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -351,14 +505,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 34,
    "metadata": {},
    "outputs": [],
    "source": [
-    "from finn.transformation.fpgadataflow.insert_dwc import InsertDWC\n",
-    "from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker\n",
-    "from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO\n",
-    "\n",
     "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_dataflow_model.onnx\")\n",
     "fc_layers = model.get_nodes_by_op_type(\"StreamingFCLayer_Batch\")\n",
     "# each tuple is (PE, SIMD, in_fifo_depth) for a layer\n",
@@ -386,9 +536,6 @@
     "    simd = folding[i][1]\n",
     "    swg_inst.set_nodeattr(\"SIMD\", simd)\n",
     "\n",
-    "model = model.transform(InsertDWC())\n",
-    "model = model.transform(InsertFIFO())\n",
-    "model = model.transform(InsertTLastMarker())\n",
     "model = model.transform(GiveUniqueNodeNames())\n",
     "model.save(build_dir + \"/end2end_cnv_w1a1_folded.onnx\")"
    ]
@@ -402,7 +549,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 9,
+   "execution_count": 35,
    "metadata": {},
    "outputs": [
     {
@@ -428,10 +575,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7f0cb1098748>"
+       "<IPython.lib.display.IFrame at 0x7f252e5a6278>"
       ]
      },
-     "execution_count": 9,
+     "execution_count": 35,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -453,94 +600,21 @@
    "source": [
     "## 4. Hardware Generation\n",
     "\n",
-    "From this point onward, the steps we have to follow do not depend on the particular network and will be exactly the same as the TFC-w1a1 example. We first proceed with HLS synthesis, **which may take 10-20 minutes depending on your host computer**."
+    "From this point onward, the steps we have to follow do not depend on the particular network and will be exactly the same as the TFC-w1a1 example. **which may take about 30 minutes depending on your host computer**. For more details about what's going on in this step, please consult the [TFC end-to-end notebook](tfc_end2end_example.ipynb) or the appropriate section in the [FINN documentation](https://finn.readthedocs.io/en/latest/hw_build.html)."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 20,
    "metadata": {},
    "outputs": [],
    "source": [
-    "from finn.transformation.fpgadataflow.prepare_ip import PrepareIP\n",
-    "from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP\n",
-    "from finn.util.basic import pynq_part_map\n",
-    "\n",
     "test_pynq_board = \"Pynq-Z1\"\n",
-    "test_fpga_part = pynq_part_map[test_pynq_board]\n",
-    "target_clk_ns = 5\n",
+    "target_clk_ns = 10\n",
     "\n",
-    "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_folded.onnx\")\n",
-    "model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))\n",
-    "model = model.transform(HLSSynthIP())\n",
-    "model.save(build_dir + \"/end2end_cnv_w1a1_ipgen.onnx\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Once the HLS synthesis is complete, we can stitch together the generated IP blocks into a larger IP that is the implementation of our network:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from finn.transformation.fpgadataflow.replace_verilog_relpaths import (\n",
-    "    ReplaceVerilogRelPaths,\n",
-    ")\n",
-    "from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP\n",
-    "\n",
-    "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_ipgen.onnx\")\n",
-    "model = model.transform(ReplaceVerilogRelPaths())\n",
-    "model = model.transform(CreateStitchedIP(test_fpga_part))\n",
-    "model.save(build_dir + \"/end2end_cnv_w1a1_ipstitch.onnx\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Finally, we create a PYNQ project that includes the hardware \"shell\" that will support our accelerator, including the data movers, and run Vivado synthesis, **which may take around 30 minutes depending on your host computer.**\n",
-    "\n",
-    "*If you'd like to watch the progress, you can open the generated project file (printed below) with the Vivado GUI.*"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Vivado synthesis project is at /tmp/finn_dev_maltanar/vivado_pynq_proj_96qtjweo/resizer.xpr\n"
-     ]
-    }
-   ],
-   "source": [
-    "from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject\n",
-    "from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject\n",
-    "\n",
-    "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_ipstitch.onnx\")\n",
-    "model = model.transform(MakePYNQProject(test_pynq_board))\n",
-    "vivado_proj = model.get_metadata_prop(\"vivado_pynq_proj\")\n",
-    "print(\"Vivado synthesis project is at %s/resizer.xpr\" % vivado_proj)\n",
-    "model.save(build_dir + \"/end2end_cnv_w1a1_pynqproj.onnx\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_pynqproj.onnx\")\n",
-    "model = model.transform(SynthPYNQProject())\n",
+    "from finn.transformation.fpgadataflow.make_zynq_proj import ZynqBuild\n",
+    "model = ModelWrapper(build_dir+\"/end2end_cnv_w1a1_folded.onnx\")\n",
+    "model = model.transform(ZynqBuild(platform = test_pynq_board, period_ns = target_clk_ns))\n",
     "model.save(build_dir + \"/end2end_cnv_w1a1_synth.onnx\")"
    ]
   },
@@ -555,74 +629,94 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 29,
+   "execution_count": 21,
    "metadata": {},
    "outputs": [],
    "source": [
     "import os\n",
-    "from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver\n",
     "from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ\n",
     "\n",
     "# set up the following values according to your own environment\n",
     "# FINN will use ssh to deploy and run the generated accelerator\n",
-    "ip = os.getenv(\"PYNQ_IP\", \"192.168.1.99\")\n",
+    "ip = os.getenv(\"PYNQ_IP\", \"192.168.2.99\")\n",
     "username = os.getenv(\"PYNQ_USERNAME\", \"xilinx\")\n",
     "password = os.getenv(\"PYNQ_PASSWORD\", \"xilinx\")\n",
     "port = os.getenv(\"PYNQ_PORT\", 22)\n",
-    "target_dir = os.getenv(\"PYNQ_TARGET_DIR\", \"/home/xilinx/finn\")\n",
+    "target_dir = os.getenv(\"PYNQ_TARGET_DIR\", \"/home/xilinx/finn_cnv_end2end_example\")\n",
     "\n",
     "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_synth.onnx\")\n",
-    "model = model.transform(MakePYNQDriver())\n",
     "model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))\n",
     "model.save(build_dir + \"/end2end_cnv_w1a1_pynq_deploy.onnx\")"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 30,
+   "execution_count": 23,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "'/home/xilinx/finn_dev_maltanar/pynq_deployment_obskagv5'"
+      ]
+     },
+     "execution_count": 23,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "target_dir_pynq = target_dir + \"/\" + model.get_metadata_prop(\"pynq_deployment_dir\").split(\"/\")[-1]\n",
+    "target_dir_pynq"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 24,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "total 4260\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    6380 May  7 15:14 driver.py\r\n",
-      "drwxr-xr-x 4 xilinx xilinx    4096 May  7 15:14 finn\r\n",
-      "-rw-r--r-- 1 xilinx xilinx 4045675 May  7 15:14 resizer.bit\r\n",
-      "-rw-r--r-- 1 xilinx xilinx  302015 May  7 15:14 resizer.hwh\r\n"
+      "total 4216\r\n",
+      "-rw-r--r-- 1 xilinx xilinx    8508 Sep 21 13:19 driver.py\r\n",
+      "drwxr-xr-x 4 xilinx xilinx    4096 Sep 21 13:19 finn\r\n",
+      "-rw-r--r-- 1 xilinx xilinx 4045671 Sep 21 13:19 resizer.bit\r\n",
+      "-rw-r--r-- 1 xilinx xilinx  246205 Sep 21 13:19 resizer.hwh\r\n",
+      "-rw-r--r-- 1 xilinx xilinx    1727 Sep 21 13:19 validate.py\r\n"
      ]
     }
    ],
    "source": [
-    "! sshpass -p {password} ssh {username}@{ip} -p {port} 'ls -l {target_dir}/*'"
+    "! sshpass -p {password} ssh {username}@{ip} -p {port} 'ls -l {target_dir_pynq}'"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "We only have two more steps to be able to remotely execute the deployed bitfile with some test data from the CIFAR-10 dataset. Let's load up some test data that comes bundled with FINN -- and before you ask, that's supposed to be a cat (CIFAR-10 class number 3)."
+    "We only have two more steps to be able to remotely execute the deployed bitfile with some test data from the CIFAR-10 dataset. Let's load up some test data that comes bundled with FINN -- *and before you ask, that's supposed to be a cat (CIFAR-10 class number 3)*."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": 40,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "<matplotlib.image.AxesImage at 0x7f0c2b2c6908>"
+       "<matplotlib.image.AxesImage at 0x7f25af026da0>"
       ]
      },
-     "execution_count": 19,
+     "execution_count": 40,
      "metadata": {},
      "output_type": "execute_result"
     },
     {
      "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD5CAYAAADhukOtAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAe7ElEQVR4nO2daYyc13Wm31NfLb1vbLLZXEVJlBVZiSmF1tiJRpGdcaAoCWQDgccewFAAIwqCCIiBzA/BA4w9wPxwBmMb/jHwgB5rrBgeyxrbgoREyNiWgwiGHUnURi3UQnGRSDbZJJu9d+1nflTJQ2nue7vJZlfTvu8DEKy+p+/3nbr1nfqq71vnHHN3CCF+/cmttwNCiM6gYBciERTsQiSCgl2IRFCwC5EICnYhEiG/mslmdgeArwHIAPwPd/9S7Pf7u/O+YaAYPlb8PBftW0xSdHBb9FxkWvR4/Ghxo8feh2P+h20WOxmZAwAxZfbSZFvuR+xo7hd/DbSOydaD04w+6UvzI/bsmKUZcYP5OLNQx1KlEXTykoPdzDIA/w3AxwAcB/C0mT3q7q+wORsGivjCv7s+fDxv0nMVC2E3LccDolqtUFu9UePnKobfjACg0Qz76JFXxXINastl1ASv9fJjgh+zUCwHx7PIS2057n+jWae2Wp2/Zs0mCQrjftTD1ygAoMKOh+UCN+xj7E29WuXXR6MRWcfINZyLvGZVcl0t8KXHYjV8vG//5ETEh0vnFgCH3P2wu1cBPAjgrlUcTwixhqwm2LcCePuCn4+3x4QQVyBrvkFnZveY2X4z2z+/FPlcIoRYU1YT7CcAbL/g523tsXfh7vvcfa+77+3rXtV+oBBiFawm2J8GsNvMdplZEcCnADx6edwSQlxuLvlW6+51M7sXwP9BS3q7391fjs6BoUreX9yX+ESyW1kC37HOgW915/ORHfJLULyswCdVqlVqqzcjPkaktyyyi58n06zJd5hR58pFbBe5GfG/al3B8UZW4nNix2vw9bAm99GImtAVec3yxm25fES5qEXW2PifsE7W2CM6Q5aFfYwpE6v6XO3ujwF4bDXHEEJ0Bn2DTohEULALkQgKdiESQcEuRCIo2IVIhA5/y8XhLLHCufzjjfAca3CpplnjklfWHZFxwJMZmOTVjEg/xUKB2urObc1a5LlFzlevh20WyeTKRWQ+y3hikGdheQ0Alhphie3UOS5PLVS5j/PzfF7mfD36u8LrWDT+Og/0dFNbd4lLaM0cv+ZyURkt7CO/OoAaS76KaG+6swuRCAp2IRJBwS5EIijYhUgEBbsQidDR3XhzR75Bdt2zyG4xSeIoZZH8+HxsWzKS6EASDADQRJh6rFhYjvtRKPJd381XXUdts9Nnqe3sucXwufJ8Vz2HSHJKnV8iS879P3gs7KOXRuicWsYTm6p9fOd/fmaK2k5MTgfH+0r8eTVOhecAwI4xvo4b+vk6duVj5azC13Excgk3iAIRK7elO7sQiaBgFyIRFOxCJIKCXYhEULALkQgKdiESYR3KvYalAcsP8RlETqjHOnDkuCxXrfOEhWKkRlqjQWqFRRJTEJFCipE6aP/q33yM2p75+S+o7eT0ueD4QkRCqze45HXs+BlqO3KCdx8pDY0Hx7eN7aJzvNRPbdU8f10KfRuprV6eD46fmzxJ5/QMcXnw+PxpaiuTWokAMNbP01p6CuFEmEYtLKMCAGviE+nkpTu7EKmgYBciERTsQiSCgl2IRFCwC5EICnYhEmFV0puZHQUwB6ABoO7ue2O/37QcKrmwvDKz2EPnNUh7ouE+Lq8NZFwOy0fqsTUjshyTNWhdPcSz6BYXz1PbT//+EWo7Pc3r9Z2eD5/v2Al+rmMTb1Nb1tVHbY1sgNp6B0aD44Uefrx8F8+iK0VaMnXluHR4thpuKza+bQedU15aoLYjR7j0NjVTprbM+PO+amPYVmhwKc9YXcaI1Hs5dPaPuDvPuRRCXBHoY7wQibDaYHcAPzKzZ8zsnsvhkBBibVjtx/hb3f2EmW0C8GMze9Xdn7jwF9pvAvcAwHA/r/IhhFhbVnVnd/cT7f8nATwM4JbA7+xz973uvrevex2+ii+EALCKYDezXjPrf+cxgD8A8NLlckwIcXlZza12DMDD7a3+PID/5e7/GJtQbxrOLIUzfKZqPOvtiZ//c3D8N3ZzyeUj7w9LPwAwHClu2SSZbQCQI216cjme0dRw3rYooibhyLEj1Da1xDPAvGc4OJ71ceknNzxHbd1Dg9RWLXOpqUraKw0M89dsoI/bJk+dorbZ87zgZH8xfIl3dXOZ763zXFwq9G+itjOn3qK2vtN8jTcPhH3ptkimIinCioisfMnB7u6HAXzgUucLITqLpDchEkHBLkQiKNiFSAQFuxCJoGAXIhE62+stKyE/GC44uHiOv+/UiuGCglOLYSkMABarvDfYQJFntjVJ3622MTicZTxjr1zlEs8ZnryGs3NcAowVRBzeGM7mWmjO0jmj4D5mkUy0aoGvY3khLDWV57kfO8c2UNsikdAAYJJktgGAFcIy5cwUL+aISAHRpQWeEZcV+XUwOcuzDidIttzOUX5951hCXKzFITcJIX6dULALkQgKdiESQcEuRCIo2IVIhI7uxnd19+J9v/X/ZcECAI7/y2t0Xt9geDf+lg+HjwUAPdkxaquSnWIAyOV5UosVwjvTDedJPP2btlPb8wcOUVvfEN+Z3rrz/dTmufDucyGyc96shFtGAUC1GmmxFVmrjCRxvPzCATpnoBRpkdTLk2R6I3XtTp4K14yrE2UFADKygw8Aw/1cnZhp8KSn81PcduTUTHB8y9hmOifPFKVIdpXu7EIkgoJdiERQsAuRCAp2IRJBwS5EIijYhUiEjkpvuSyPnsGwpLTz6uvovCWiWuzYdS2dM1rj0sr0ES7L1SKJMI16ONHhlts+TufsuJp3xNr1m0ep7ZnnXqC24T4uyZycDNdPyzsv410qcMkLfBkxH0kKmSF14YZ7+bkip0IjIpWNbgxLswBQqYVfz7Pnw3IXAFikZVd/pE5ePuPhVC3zxJvDbx8Pjm8c4jLf7m3hNmoeuX/rzi5EIijYhUgEBbsQiaBgFyIRFOxCJIKCXYhEWFZ6M7P7AfwxgEl3v7E9NgLgewCuAnAUwCfdnRfZeudYuRyyUjhD6eTpg3Tent/+YHC8d5DX/MrmTlBbox5pkROpdXb47XC23K3D4bp6AICebdTU38vlmK48z+TqjtQ66yqSjK1IXbWtW8ap7ZU336S2YpHX+ZudC6/VVdt20znXXX8DtU1N8curb4BnHZ48NRkctxyv7zY0zGv8zURqyWURya67h/u4NBe+Dg6R6w0Auovhc9XqkSxFavl/fAvAHe8Zuw/A4+6+G8Dj7Z+FEFcwywZ7u9/6e78hcReAB9qPHwDAv1UihLgiuNS/2cfcfaL9+BRaHV2FEFcwq96gc3dH5JuOZnaPme03s/0zM7xmuBBibbnUYD9tZuMA0P4/vAsCwN33uf
ted987ODhwiacTQqyWSw32RwHc3X58N4BHLo87Qoi1YiXS23cB3A5g1MyOA/gCgC8BeMjMPgvgGIBPruRkZhkKXeG7e7nMCyJWKuG0t0JEgurp5Z8ieiMtjUoZz3rry4f7NX1r3zfpnD/5t/dSW2HhFLUVS5HspRz3cdfVW4Pjk1Mn6ZzyPM9e27xplNqmZrl0WKmGX8+rr+WZitdcyzMfZ557ltoW5uapbXYh7GO9wSWqpaVwOyYAGBoapLaGc6lsYIhn+9Wr4dczy/H+YMcnwh+mqyTLD1hBsLv7p4np95ebK4S4ctA36IRIBAW7EImgYBciERTsQiSCgl2IROhowUmYwbKwBLEYkX/Ki0vB8UKkJ9fcOZ7lhYxLbwXwQoTjQ+FMqTcO8p5tJ49zGxa5HHbs+FFqu2kz73G3dWe4GOWWSf6N5oVDvADnSCnSx26Iy3KHDx8Njo9vCUuDADA9y79hWYtIZafP8F51TbfguEWKQy5GpDfL8esqfKYWvZFClWiGs+yKFr7uAaB6LizbeqRsp+7sQiSCgl2IRFCwC5EICnYhEkHBLkQiKNiFSITOSm8OgPTsypxLK+Oj4f5wPV1cevvpAV4ocThSlG/3CM9O6iqFZZdinks1ZyaPUluzwosX7riGF7HMIs+7Z2A4OD46xgtfnpviWWMzkcy2RkTd3Ej6r+UjcmmZZH8B8WyupTLPDqsTJ9k4AJQrPAOzXuf3xw2jm6jNjF9XRQtfPyWL9B30cMZnIVL0Und2IRJBwS5EIijYhUgEBbsQiaBgFyIROrobbwYU8uFkksE+npwy1B+2WZPvVs46Tzw4e56nLIz28yXpLYZ3VBu5cI08ADh68ii1jQ3zemY7r+WtkMr8dHjqmXAbrRMTfOe/vy+8gw8AhQJv8fTyobe4I+Q+0ozcXyqR3fj5BZ4UMjTC2zXVSSLMxGlaEBm9/fx1yWc80aSnh9dELLK2XABQCyfyNBam6ZSxTf3B8XyBt7XSnV2IRFCwC5EICnYhEkHBLkQiKNiFSAQFuxCJsJL2T/cD+GMAk+5+Y3vsiwD+HMCZ9q993t0fW8kJMwtLIZs3hWuntZwkMk4kAWJ8G08k2R+Rw6aNS3aehevkDY7ypIrBAZ4AUegKyycAcFVEeusbDCcGAcD/vP/bwfHFyFrNLk1R2+ISrw1YiFw9m4fDz7s8xevdLZBEIwAYHOCvy6uvvUFtp0+fCY7PRlpGDQ3xJzbQ20dtmXNNtFDl65iRWoQbe/nxBrvCcZSP3L5Xcmf/FoA7AuNfdfc97X8rCnQhxPqxbLC7+xMA+Fu/EOJXgtX8zX6vmR0ws/vNjH8FSwhxRXCpwf51ANcA2ANgAsCX2S+a2T1mtt/M9k9P86//CSHWlksKdnc/7e4Nd28C+AYA2rXA3fe5+1533zs0xBsOCCHWlksKdjMbv+DHTwB46fK4I4RYK1YivX0XwO0ARs3sOIAvALjdzPagVVXuKIC/WMnJcrkczf4ZGObSW70RdrOU55lE1+3aQW37n+GS12zhWmpr2lxwfGwrl9deOfgv1PY7v/dn1PaLn/N5CwuRNknVs8HxyVNv0zmx9/z5GrflwaWh4Vw4y25rN/d95gyX0OoZ3xYa28RtjUY4k24p0uKpvMTr7i1EaujVm1zOq5VPUNumQjijb0sfz6Kr1MNzYnfvZYPd3T8dGP7mcvOEEFcW+gadEImgYBciERTsQiSCgl2IRFCwC5EIHS04mcvl0NsXzl4aHh2l8+oWdrOcK9I5XX0D1DY0xAsKvvX2KWq79YPvD/sxz9tJ9fSHs64AYOLEcWo79Prr1FZv8PZEOVJvcGF2hs7p3zBObTMzXIYa7OPFKN933Y3B8adfeJXOefbVo9R26+1/SG2FIpeoDh86FByfmePPK1YUs7zE5bWdY1zS7e7lBVVHRsLzPM8LcNar4cKXTrJKAd3ZhUgGBbsQiaBgFyIRFOxCJIKCXYhEULALkQgdld7cm2jWw5LH4Agv5LewFC5EuNjgfbeyjL+P7di+jdpef5lnXs0shiW2vl6eYbf9GmrCsdd58cUTJyeo7cMf/iC1LS6GpaH+LVvpnJEtvDjnW1NcKluqcMmx2BvuvzawcTudc1M/f13OnAn3QwOAo8deoLaFpbBMOT3DJbSNGzdS26Dz12VnH5dENw3wHmwFC2cCVmu8v10vkdhy4DGhO7sQiaBgFyIRFOxCJIKCXYhEULALkQgd3Y1v1muYOxfezeyO1PaqlMO7nNbk7pvxXcnREd4+6fXcYWqbnAq38DmX8V3pwT5eW+/6G3lCzuFjvGZcjXdJwvRsWO3YvXs3nbN7F5cMjk3wBJqXX36R2s6dDSenFEtcdRnu44kkx1/mqsCpc7yunZFkqSzSeivWOmwnzzPBjn6eGNSV40ktlXL4+mk2eW3DWp0cj1/2urMLkQoKdiESQcEuRCIo2IVIBAW7EImgYBciEVbS/mk7gL8DMIbWxv4+d/+amY0A+B6Aq9BqAfVJdw/3/GlTqVRw+FBY2tqx+zfovK5cWHprVnmiQL4rIoNEbP39XBrqGwjXtbv++vfROT/50WPUtjjD6931jGyitkPHJ6lt+7ZwUs6u991M55SK/DK4egdP8pme4i/3KwfDCUVN57rhiWmeSDJLkqEAoNzgsu3sdFiK3LSZJ928dY7XpxvZzuXScyXuB5r8uU3Xw8/N8/w6rZDjVcETblZyZ68D+Bt3vwHAhwD8lZndAOA+AI+7+24Aj7d/FkJcoSwb7O4+4e7Pth/PATgIYCuAuwA80P61BwB8fK2cFEKsnov6m93MrgJwE4AnAYy5/zK59xRaH/OFEFcoKw52M+sD8AMAn3P3d30/0d0d5It6ZnaPme03s/1zc7xggBBibVlRsJtZAa1A/467/7A9fNrMxtv2cQDBXSN33+fue919b2zzSwixtiwb7GZmaPVjP+juX7nA9CiAu9uP7wbwyOV3TwhxuVhJ1tvvAvgMgBfN7Pn22OcBfAnAQ2b2WQDHAHxyuQMtVup4/lBYNtpx4y10XhPhbDNjmT8A0OTpP7Nzc9Q2PX2W2jaM7AmO33nHR+icPR+4ntoe+uHD1GbGJZTBwWFq27olLCn1DQzROVk9vL4AMLKZXyLju2rUNtMdlo2ee4HXi5uY5yllXuDtvAY38yzG0WvCUlkWkbUazv14zcPtywDg0CkuDxYzfsylcjk4vhi5vOvN8PUx1+DZgcsGu7v/DADz9PeXmy+EuDLQN+iESAQFuxCJoGAXIhEU7EIkgoJdiEToaMHJcsPw+kx30Ha2wQsAeiEsTeSqvBiiE2kCAHI5btsyzrPN/vXvhDPHugpcctm1k7dd+qM//RS1ff/hf6C2s6f4856YCRcvLJcP0TlFcI1naonbDh3jWXuohmU5H+UZgsObwkUqAaAZqaTY+s4XmdcVPmbTwoUoAaAWaSs20+Dn6irwY3blufS2YOEsu1qBn8ub4fVtRCRb3dmFSAQFuxCJoGAXIhEU7EIkgoJdiERQsAuRCB2V3ioNw+vT4feXR37G+4bt2TkaH
N9c5BlIPYVIttZm3n9tfJRnV11zNSlS6LyY4MSZc9R2/4NcXnv2+VeojfW+AwCaCOj8fd0b/HiNEl+PRo5LQ3mEJdZ6RBqq58JzAKArdqVGstTK1fDz9hyfk49kxGVN3tfPy1ymrIPPKzTDPmbGX7NqLex/pMWh7uxCpIKCXYhEULALkQgKdiESQcEuRCJ0dDe+AcN8Lpws8Pizr9N5b7wZbhl1x2/fQOdcs4W36TlyONyaCABu++CN1NZFEhPmqnyH+aF/fJrannvlJLUt1iOthCK7xblC+P27GanJlzO+ixzbtW40eQJQheww1xp8jhmvaVdBJCnE+XPL58lOd8bvcz09PKGlCO5/g2+4o2E81BpkYr3GX5dif7imoOX4eXRnFyIRFOxCJIKCXYhEULALkQgKdiESQcEuRCIsK72Z2XYAf4dWS2YHsM/dv2ZmXwTw5wDOtH/18+7+WPRk+Tw2jG4M2qbOc/lk4vx0cPznL/BWN43azognXFrZuJkkuwCwLCyHPbX/JTrnH376C2qrNHnNNeS59JbLXfx7dKPCk108Iss1I/JaTPJiLZQKeX7JWcYlTGT8NctH5mVZ+HyxJqNZZH1zzuXBRiTZqBmRDplmt3kzl4/7B8K2N0uRdeIe/JI6gL9x92fNrB/AM2b247btq+7+X1dwDCHEOrOSXm8TACbaj+fM7CAAXjJVCHFFclGfB83sKgA3AXiyPXSvmR0ws/vNjLcWFUKsOysOdjPrA/ADAJ9z91kAXwdwDYA9aN35v0zm3WNm+81sf32Jt0oWQqwtKwp2a1Xh/wGA77j7DwHA3U+7e8PdmwC+ASDYYN3d97n7Xnffm+/mjSCEEGvLssFuZgbgmwAOuvtXLhgfv+DXPgGAb0kLIdadlezG/y6AzwB40cyeb499HsCnzWwPWnLcUQB/sdyBzIzKJIUCl5rq5bCccPT0LJ1TWThIbbfdfB21dQ+NU9tMOSyR/POT++mcsvPMpVqdyzilEs9sa0bqoC0uhlsJxcgiGVnGk94Q6ciEEpG8YllZiNisxGXK7m5euy5PpL5aJKNsbmGB2hoRmbJS56/L4HC4jiIAjI2HbX2RwntLc+E/iT1ybaxkN/5nAEIveVRTF0JcWegbdEIkgoJdiERQsAuRCAp2IRJBwS5EInS04CTc0ayTLKpYxlAWlqGq4NlOk/MVanv2NV7o8c5FLq3MeVjuOHGefzOw1Mezq+qL3P9yhfvf0xORmkjbq9jxLMf9yEXaNcUy2JzIaB65vxQicuN8jWffVetcKmOyXCxjLyahLURab/UNcXltaCNvOVath4/52qs8q7NAshFrVe6f7uxCJIKCXYhEULALkQgKdiESQcEuRCIo2IVIhA5LbwBY1pBzuSPLwsX6ms5loUaOF/g7Osmlsvsf4vk9H719b3D8yMkzwXEAWGzEihBGZKguXjgwK3JbD+lhVuzmstbSHJeuYtlhHpGoCiRjK8vz1yx2rixSVDLWx25pcf6i58TONTQ8Qm0bxnjG5NlzU9Q2ffZUePwt3pPw2l27woaIpKg7uxCJoGAXIhEU7EIkgoJdiERQsAuRCAp2IRKho9Jbls8wMjQUtJXLXA5bWApn8hQznv1Vj8hCuUhxyyeeOkBtR06Gs+VmFnjhyKn5JWojyU4AgN7eSLZcpKhgqRR+bvmIXNfVzTPKskhGXL7Aj9kg95F6RPKyiM2d+9io8fWv1sKL3N3FpcjRDRuobXiUy2vVSOZmpRgpHkn6szXzXD5eKIevq2ZEwtadXYhEULALkQgKdiESQcEuRCIo2IVIhGV3482sC8ATAErt3/++u3/BzHYBeBDABgDPAPiMu0f2lwFvOipkF7EUedupNMK7rYWM7wbX+SYyPMdPluvmu+DHSMJLLpLcUa/xHeaYYlAul6ltIdKeKEeeG9ulB4DeIt/17Y4k0ORy3P9iV/h83T18fatVnghzdoonkjTB5+UL4fUYHuilc8ZGwooRAGzezBNhphd4nb+56fPUNj8zHRwfGuHnOnvmbHC8HkkmWsmdvQLgo+7+AbTaM99hZh8C8LcAvuru1wI4D+CzKziWEGKdWDbYvcU7eYKF9j8H8FEA32+PPwDg42vioRDisrDS/uxZu4PrJIAfA3gTwLT7L1uUHgewdW1cFEJcDlYU7O7ecPc9ALYBuAXA9Ss9gZndY2b7zWx/bZG3WBZCrC0XtRvv7tMA/gnAhwEMmf2ysfc2ACfInH3uvtfd9xZ6BlblrBDi0lk22M1so5kNtR93A/gYgINoBf2ftn/tbgCPrJWTQojVs5JEmHEAD5hZhtabw0Pu/vdm9gqAB83sPwN4DsA3lztQs9lEZSksKZUyo/N6iJfNGk8yiXQtQhNcMoolEjRJu6l6NZLA0eDPK9aCKGZrRhJhmPR2/jyXfqYi6zjQxyWqwUg9tgFSC68LXMprNLl0lbdIsk6Jv9iVcviYpTx/XWLnqi/ORGzc//npc9TWJMk6XSUuiZZZnTyLPC9qaePuBwDcFBg/jNbf70KIXwH0DTohEkHBLkQiKNiFSAQFuxCJoGAXIhEsJvFc9pOZnQFwrP3jKIBw6k5nkR/vRn68m181P3a6+8aQoaPB/q4Tm+1393DzNPkhP+THZfdDH+OFSAQFuxCJsJ7Bvm8dz30h8uPdyI9382vjx7r9zS6E6Cz6GC9EIqxLsJvZHWb2mpkdMrP71sOHth9HzexFM3vezPZ38Lz3m9mkmb10wdiImf3YzN5o/z+8Tn580cxOtNfkeTO7swN+bDezfzKzV8zsZTP76/Z4R9ck4kdH18TMuszsKTN7oe3Hf2qP7zKzJ9tx8z0z4xVXQ7h7R/8ByNAqa3U1gCKAFwDc0Gk/2r4cBTC6Due9DcDNAF66YOy/ALiv/fg+AH+7Tn58EcC/7/B6jAO4uf24H8DrAG7o9JpE/OjomgAwAH3txwUATwL4EICHAHyqPf7fAfzlxRx3Pe7stwA45O6HvVV6+kEAd62DH+uGuz8B4L21ke9Cq3An0KECnsSPjuPuE+7+bPvxHFrFUbaiw2sS8aOjeIvLXuR1PYJ9K4C3L/h5PYtVOoAfmdkzZnbPOvnwDmPuPtF+fArA2Dr6cq+ZHWh/zF/zPycuxMyuQqt+wpNYxzV5jx9Ah9dkLYq8pr5Bd6u73wzgDwH8lZndtt4OAa13drTeiNaDrwO4Bq0eARMAvtypE5tZH4AfAPicu7+rOmkn1yTgR8fXxFdR5JWxHsF+AsD2C36mxSrXGnc/0f5/EsDDWN/KO6fNbBwA2v9ProcT7n66faE1AXwDHVoTMyugFWDfcfcftoc7viYhP9ZrTdrnvugir4z1CPanAexu7ywWAXwKwKOddsLMes2s/53HAP4AwEvxWWvKo2gV7gTWsYDnO8HV5hPowJqYmaFVw/Cgu3/lAlNH14T50ek1WbMir53aYXzPbuOdaO10vgngP6yTD1ejpQS8AODlTvoB4LtofRysofW312fR6pn3OIA3APwEwMg6+fFtAC8COIBWsI13wI9b0fqIfgDA8+1/d3Z6TSJ+
dHRNAPwWWkVcD6D1xvIfL7hmnwJwCMD/BlC6mOPqG3RCJELqG3RCJIOCXYhEULALkQgKdiESQcEuRCIo2IVIBAW7EImgYBciEf4vt7E0CnHQV6IAAAAASUVORK5CYII=\n",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD5CAYAAADhukOtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAAe8klEQVR4nO2da4yc53Xf/2feuex9Z5dLLpdXURJlRVZiSqFVO1EV2akDRUkgGwhcu4ChAEYUBBEQA+kHwQVqF+gHp6ht+EPhgq5VK4ZrWbUtSEiE1LYcRDDsSKJu1IW6ULxIJJdcksu97+zcTj/MyKXU5//sksudpf38fwDB2efs875nnnnPvLPPf8455u4QQvz6k1tvB4QQnUHBLkQiKNiFSAQFuxCJoGAXIhEU7EIkQn41k83sDgBfA5AB+B/u/qXY7/d3533DQDF8rPh5Ltq3mKTo4Lbouci06PH40eJGj70Px/wP2yx2MjIHAGLK7KXJttyP2NHcL/4aaB2TrQenGX3Sl+ZH7NkxSzPiBvNxer6OxaVG0MlLDnYzywD8NwAfA3AcwNNm9qi7v8LmbBgo4gv/7vrw8bxJz1UshN20HA+IanWJ2uqNGj9XMfxmBACNZthHj7wqlmtQWy6jJnitlx8T/JiFYiU4nkVeastx/xvNOrXV6vw1azZJUBj3ox6+RgEAS+x4WC5wwz7G3tSrVX59NBqRdYxcw7nIa1Yl19U8X3osVMPH+/ZPTkR8uHRuAXDI3Q+7exXAgwDuWsXxhBBryGqCfSuAty/4+Xh7TAhxBbLmG3Rmdo+Z7Tez/XOLkc8lQog1ZTXBfgLA9gt+3tYeexfuvs/d97r73r7uVe0HCiFWwWqC/WkAu81sl5kVAXwKwKOXxy0hxOXmkm+17l43s3sB/B+0pLf73f3l6BwYquT9xX2RTyS7lSXwHesc+FZ3Ph/ZIb8ExcsKfNJStUpt9WbEx4j0lkV28fNkmjX5DjPqXLmI7SI3I/5XrSs43shKfE7seA2+HtbkPhpRE7oir1neuC2XjygXtcgaG/8T1skae0RnyLKwjzFlYlWfq939MQCPreYYQojOoG/QCZEICnYhEkHBLkQiKNiFSAQFuxCJ0OFvuTicJVY4l3+8EZ5jDS7VNGtc8sq6IzIOeDIDk7yaEemnWChQW925rVmLPLfI+er1sM0imVy5iMxnGU8M8iwsrwHAYiMssZ06x+Wp+Sr3cW6Oz8ucr0d/V3gdi8Zf54GebmrrLnEJrZnj11wuKqOFfeRXB1BjyVcR7U13diESQcEuRCIo2IVIBAW7EImgYBciETq6G2/uyDfIrnsW2S0mSRylLJIfn49tS0YSHUiCAQCaCFOPFQvLcT8KRb7ru/mq66htZuostZ09txA+V57vqucQSU6p80tk0bn/B4+FffTSMJ1Ty3hiU7WP7/zPTU9S24mJqeB4X4k/r8ap8BwA2DHK13FDP1/HrnysnFX4Oi5GLuEGUSBi5bZ0ZxciERTsQiSCgl2IRFCwC5EICnYhEkHBLkQirEO517A0YPkyn0HkhHqsA0eOy3LVOk9YKEZqpDUapFZYJDEFESmkGKmD9q/+zceo7Zmf/4LaTk6dC47PRyS0eoNLXseOn6G2Iyd495FSeSw4vm10F53jpX5qq+b561Lo20ht9cpccPzcxEk6p6fM5cHjc6eprUJqJQLAaD9Pa+kphBNhGrWwjAoArIlPpJOX7uxCpIKCXYhEULALkQgKdiESQcEuRCIo2IVIhFVJb2Z2FMAsgAaAurvvjf1+03JYyoXllemFHjqvQdoTDfVxeW0g43JYPlKPrRmR5ZisQevqIZ5Ft7Bwntp++vePUNvpKV6v7/Rc+HzHTvBzHRt/m9qyrj5qa2QD1NY7MBIcL/Tw4+W7eBZdKdKSqSvHpcOz1XBbsbFtO+icyuI8tR05wqW3yekKtWXGn/dVG8O2QoNLecbqMkak3suhs3/E3XnOpRDiikAf44VIhNUGuwP4kZk9Y2b3XA6HhBBrw2o/xt/q7ifMbBOAH5vZq+7+xIW/0H4TuAcAhvp5lQ8hxNqyqju7u59o/z8B4GEAtwR+Z5+773X3vX3d6/BVfCEEgFUEu5n1mln/O48B/AGAly6XY0KIy8tqbrWjAB5ub/XnAfwvd//H2IR603BmMZzhM1kr03lP/Pyfg+O/sZtLLh95f1j6AYChSHHLJslsA4AcadOTy/GMpobztkURNQlHjh2htslFngHmPUPB8ayPSz+5oVlq6y4PUlu1wqWmKmmvNDDEX7OBPm6bOHWK2mbO84KT/cXwJd7VzWW+t85zcanQv4nazpx6i9r6TvM13jwQ9qXbIpmKpAgrIrLyJQe7ux8G8IFLnS+E6CyS3oRIBAW7EImgYBciERTsQiSCgl2IROhsr7eshPxguODgwjn+vlMrhgsKTi6EpTAAWKjy3mADRZ7Z1iR9t9rG4HCW8Yy9SpVLPGd48hrOznIJMFYQcWhjOJtrvjlD54yA+5hFMtGqBb6Olfmw1FSZ437sHN1AbQtEQgOACZLZBgBWCMuU05O8mCMiBUQX53lGXFbk18HEDM86HCfZcjtH+PWdYwlxsRaH3CSE+HVCwS5EIijYhUgEBbsQiaBgFyIROrob39Xdi/f91v+XBQsAOP4vr9F5fYPh3fhbPhw+FgD0ZMeorUp2igEgl+dJLVYI70w3vEzn9G/aTm3PHzhEbX1lvjO9def7qc1z4d3nQmTnvLkUbhkFANVqpMVWZK0yksTx8gsH6JyBUqRFUi9PkumN1LU7eSpcM65OlBUAyMgOPgAM9XN1YrrBk57OT3LbkVPTwfEto5vpnDxTlCLZVbqzC5EICnYhEkHBLkQiKNiFSAQFuxCJoGAXIhE6Kr3lsjx6BsOS0s6rr6PzFolqsWPXtXTOSI1LK1NHuCxXiyTCNOrhRIdbbvs4nbPjat4Ra9dvHqW2Z557gdqG+rgkc3IiXD8t77yMd6nAJS/wZcRcJClkmtSFG+rl54qcCo2IVDayMSzNAsBSLfx6nj0flrsAwCItu/ojdfLyGQ+naoUn3hx++3hwfGOZy3y7t4XbqHnk/q07uxCJoGAXIhEU7EIkgoJdiERQsAuRCAp2IRJhWenNzO4H8McAJtz9xvbYMIDvAbgKwFEAn3R3XmTrnWPlcshK4Qylk6cP0nl7fvuDwfHeQV7zK5s9QW2NeqRFTqTW2eG3w9lytw6F6+oBAHq2UVN/L5djuvI8k6s7Uuusq0gytiJ11bZuGaO2V958k9qKRV7nb2Y2vFZXbdtN51x3/Q3UNjnJL6++gTK1nTw1ERy3HK/vVh7iNf6mI7Xksohk191TprbF2fB1cIhcbwDQXQyfq1aPZClSy//jWwDueM/YfQAed/fdAB5v/yyEuIJZNtjb/dbf+w2JuwA80H78AICPX163hBCXm0v9m33U3cfbj0+h1dFVCHEFs+oNOnd3RL7paGb3mNl+M9s/Pc1rhgsh1pZLDfbTZjYGAO3/w7sgANx9n7vvdfe9g4MDl3g6Ic
RqudRgfxTA3e3HdwN45PK4I4RYK1YivX0XwO0ARszsOIAvAPgSgIfM7LMAjgH45EpOZpah0BW+u1cqvCDi0lI47a0QkaB6evmniN5IS6NSxrPe+vLhfk3f2vdNOudP/u291FaYP0VtxVIkeynHfdx19dbg+MTkSTqnMsez1zZvGqG2yRkuHS5Vw6/n1dfyTMVrruWZj9PPPUtt87Nz1DYzH/ax3uAS1eJiuB0TAJTLg9TWcC6VDZR5tl+9Gn49sxzvD3Z8PPxhukqy/IAVBLu7f5qYfn+5uUKIKwd9g06IRFCwC5EICnYhEkHBLkQiKNiFSISOFpyEGSwLSxALEfmnsrAYHC9EenLNnuNZXsi49FYAL0Q4Vg5nSr1xkPdsO3mc27DA5bBjx49S202beY+7rTvDxSi3TPBvNM8f4gU4h0tlausvc1nu8OGjwfGxLWFpEACmZvg3LGsRqez0Gd6rrukWHLdIcciFiPRmOX5dhc/UojdSqBLNcJZd0cLXPQBUz4VlW4+U7dSdXYhEULALkQgKdiESQcEuRCIo2IVIBAW7EInQWenNAZCeXZlzaWVsJNwfrqeLS28/PcALJQ5FivLtHubZSV2lsOxSzHOp5szEUWprLvHihTuu4UUss8jz7hkYCo6PjPLCl+cmedbYdCSzrRFRNzeS/mv5iFxaIdlfQDyba7HCs8PqxEk2DgCVJZ6BWa/z++OGkU3UZsavq6KFr5+SRfoOejjjsxApeqk7uxCJoGAXIhEU7EIkgoJdiERQsAuRCB3djTcDCvlwMslgH09OKfeHbdbku5UzzhMPzp7nKQsj/XxJeovhHdVGLlwjDwCOnjxKbaNDvJ7Zzmt5K6QKPx2eeibcRuvEON/57+8L7+ADQKHAWzy9fOgt7gi5jzQj95elyG783DxPCikP83ZNdZIIM36aFkRGbz9/XfIZTzTp6eE1EYusLRcA1MKJPI35KTpldFN/cDxf4G2tdGcXIhEU7EIkgoJdiERQsAuRCAp2IRJBwS5EIqyk/dP9AP4YwIS739ge+yKAPwdwpv1rn3f3x1ZywszCUsjmTeHaaS0niYwTSYAY28YTSfZH5LAp45KdZ+E6eYMjPKlicIAnQBS6wvIJAFwVkd76BsOJQQDwP+//dnB8IbJWM4uT1LawyGsDFiJXz+ah8POuTPJ6d/Mk0QgABgf46/Lqa29Q2+nTZ4LjM5GWUeUyf2IDvX3UljnXRAtVvo4ZqUW4sZcfb7ArHEf5yO17JXf2bwG4IzD+VXff0/63okAXQqwfywa7uz8BgL/1CyF+JVjN3+z3mtkBM7vfzPhXsIQQVwSXGuxfB3ANgD0AxgF8mf2imd1jZvvNbP/U1NQlnk4IsVouKdjd/bS7N9y9CeAbAGjXAnff5+573X1vuVy+RDeFEKvlkoLdzMYu+PETAF66PO4IIdaKlUhv3wVwO4ARMzsO4AsAbjezPWhVlTsK4C9WcrJcLkezfwaGuPRWb4TdLOV5JtF1u3ZQ2/5nuOQ1U7iW2po2Gxwf3crltVcO/gu1/c7v/Rm1/eLnfN78fKRNUvVscHzi1Nt0Tuw9f67GbXlwaWgoF86y29rNfZ8+wyW0esa3hUY3cVujEc6kW4y0eKos8rp785EaevUml/NqlRPUtqkQzujb0sez6Jbq4Tmxu/eywe7unw4Mf3O5eUKIKwt9g06IRFCwC5EICnYhEkHBLkQiKNiFSISOFpzM5XLo7QtnLw2NjNB5dQu7WckV6ZyuvgFqK5d5QcG33j5Fbbd+8P1hP+Z4O6me/nDWFQCMnzhObYdef53a6g3enihH6g3Oz0zTOf0bxqhteprLUIN9vBjl+667MTj+9Auv0jnPvnqU2m69/Q+prVDkEtXhQ4eC49Oz/HnFimJWFrm8tnOUS7rdvbyg6vBweJ7neQHOejVc+NJJVimgO7sQyaBgFyIRFOxCJIKCXYhEULALkQgKdiESoaPSm3sTzXpY8hgc5oX85hfDhQgXGrzvVpbx97Ed27dR2+sv88yr6YWwxNbXyzPstl9DTTj2Oi++eOLkOLV9+MMfpLaFhbA01L9lK50zvIUX53xrkktli0tcciz2hvuvDWzcTufc1M9flzNnwv3QAODosReobX4xLFNOTXMJbePGjdQ26Px12dnHJdFNA7wHW8HCmYDVGu9v10skthx4TOjOLkQiKNiFSAQFuxCJoGAXIhEU7EIkQkd345v1GmbPhXczuyO1vZYq4V1Oa3L3zfiu5Mgwb5/0eu4wtU1Mhlv4nMv4rvRgH6+td/2NPCHn8DFeM67GuyRhaiasduzevZvO2b2LSwbHxnkCzcsvv0ht586Gk1OKJa66DPXxRJLjL3NV4NQ5XtfOSLJUFmm9FWsdtpPnmWBHP08M6srxpJalSvj6aTZ5bcNanRyPX/a6swuRCgp2IRJBwS5EIijYhUgEBbsQiaBgFyIRVtL+aTuAvwMwitbG/j53/5qZDQP4HoCr0GoB9Ul3D/f8abO0tITDh8LS1o7dv0HndeXC0luzyhMF8l0RGSRi6+/n0lDfQLiu3fXXv4/O+cmPHqO2hWle765neBO1HTo+QW3bt4WTcna972Y6p1Tkl8HVO3iSz9Qkf7lfORhOKGo61w1PTPFEkhmSDAUAlQaXbWemwlLkps086eatc7w+3fB2LpeeK3E/0OTPbaoefm6e59fpEjleFTzhZiV39jqAv3H3GwB8CMBfmdkNAO4D8Li77wbwePtnIcQVyrLB7u7j7v5s+/EsgIMAtgK4C8AD7V97AMDH18hHIcRl4KL+ZjezqwDcBOBJAKPuv0zuPYXWx3whxBXKioPdzPoA/ADA59z9Xd9PdHcH+aKemd1jZvvNbP/sLC8YIIRYW1YU7GZWQCvQv+PuP2wPnzazsbZ9DEBw18jd97n7XnffG9v8EkKsLcsGu5kZWv3YD7r7Vy4wPQrg7vbjuwE8cvndE0JcLlaS9fa7AD4D4EUze7499nkAXwLwkJl9FsAxAJ9c7kALS3U8fygsG+248RY6r4lwtpmxzB8AaPL0n5nZWWqbmjpLbRuG9wTH77zjI3TOng9cT20P/fBhajPjEsrg4BC1bd0SlpT6Bsp0TlYPry8ADG/ml8jYrhq1TXeHZaPnXuD14sbneEqZF3g7r8HNPItx5JqwVJZFZK2Gcz9e83D7MgA4dIrLg8WMH3OxUgmOL0Qu73ozfH3MNnh24LLB7u4/A8A8/f3l5gshrgz0DTohEkHBLkQiKNiFSAQFuxCJoGAXIhE6WnCy0jC8Pt0dtJ1t8AKAXghLE7kqL4boRJoAgFyO27aM8Wyzf/074cyxrgKXXHbt5G2X/uhPP0Vt33/4H6jt7Cn+vMenw8ULK5VDdE4RXOOZXOS2Q8d41h6qYVnOR3iG4NCmcJFKAGhGKim2vvNF5nWFj9m0cCFKAKhF2opNN/i5ugr8mF15Lr3NWzjLrlbg5/JmeH0bEclWd3YhEkHBLkQiKNiFSAQFuxCJoGAXIhEU7EIkQkelt6WG4fWp8PvLIz/jfcP27BwJjm8u8gyknkIkW
2sz7782NsKzq665mhQpdF5McPzMOWq7/0Eurz37/CvUxnrfAQBNBHT+vu4NfrxGia9HI8eloTzCEms9Ig3Vc+E5ANAVu1IjWWqVavh5e47PyUcy4rIm7+vnFS5T1sHnFZphHzPjr1m1FvY/0uJQd3YhUkHBLkQiKNiFSAQFuxCJoGAXIhE6uhvfgGEuF04WePzZ1+m8N94Mt4y647dvoHOu2cLb9Bw5HG5NBAC3ffBGausiiQmzVb7D/NA/Pk1tz71yktoW6pFWQpHd4lwh/P7djNTkyxnfRY7tWjeaPAFoieww1xp8jhmvabeESFKI8+eWz5Od7ozf53p6eEJLEdz/Bt9wR8N4qDXIxHqNvy7F/nJw3HL8PLqzC5EICnYhEkHBLkQiKNiFSAQFuxCJoGAXIhGWld7MbDuAv0OrJbMD2OfuXzOzLwL4cwBn2r/6eXd/LHqyfB4bRjYGbZPnuXwyfn4qOP7zF3irm0ZtZ8QTLq1s3EySXQBYFpbDntr/Ep3zDz/9BbUtNXnNNeS59JbLXfx7dGOJJ7t4RJZrRuS1mOTFWigV8vySs4xLmMj4a5aPzMuy8PliTUazyPrmnMuDjUiyUTMiHTLNbvNmLh/3D4Rtb5Yi68Q9+CV1AH/j7s+aWT+AZ8zsx23bV939v67gGEKIdWYlvd7GAYy3H8+a2UEAvGSqEOKK5KI+D5rZVQBuAvBke+heMztgZvebGW8tKoRYd1Yc7GbWB+AHAD7n7jMAvg7gGgB70Lrzf5nMu8fM9pvZ/voib5UshFhbVhTs1qrC/wMA33H3HwKAu59294a7NwF8A0Cwwbq773P3ve6+N9/NG0EIIdaWZYPdzAzANwEcdPevXDA+dsGvfQIA35IWQqw7K9mN/10AnwHwopk93x77PIBPm9ketOS4owD+YrkDmRmVSQoFLjXVK2E54ejpGTpnaf4gtd1283XU1l0eo7bpSlgi+ecn99M5FeeZS7U6l3FKJZ7Z1ozUQVtYCLcSipFFMrKMJ70h0pEJJSJ5xbKyELFZicuU3d28dl2eSH21SEbZ7Pw8tTUiMuVSnb8ug0PhOooAMDoWtvVFCu8tzob/JPbItbGS3fifAQi95FFNXQhxZaFv0AmRCAp2IRJBwS5EIijYhUgEBbsQidDRgpNwR7NOsqhiGUNZWIaqgmc7TcwtUduzr/FCj3cucGll1sNyx4nz/JuBpT6eXVVf4P5Xlrj/PT0RqYm0vYodz3Lcj1ykXVMsg82JjOaR+0shIjfO1Xj2XbXOpTImy8Uy9mIS2nyk9VZfmctr5Y285Vi1Hj7ma6/yrM4CyUasVbl/urMLkQgKdiESQcEuRCIo2IVIBAW7EImgYBciETosvQFgWUPO5Y4sCxfrazqXhRo5XuDv6ASXyu5/iOf3fPT2vcHxIyfPBMcBYKERK0IYkaG6eOHArMhtPaSHWbGby1qLs1y6imWHeUSiKpCMrSzPX7PYubJIUclYH7vFhbmLnhM7V3lomNo2jPKMybPnJqlt6uyp8PhbvCfhtbt2hQ0RSVF3diESQcEuRCIo2IVIBAW7EImgYBciERTsQiRCR6W3LJ9huFwO2ioVLofNL4YzeYoZz/6qR2ShXKS45RNPHaC2IyfD2XLT87xw5OTcIrWRZCcAQG9vJFsuUlSwVAo/t3xEruvq5hllWSQjLl/gx2yQ+0g9InlZxObOfWzU+PpXa+FF7u7iUuTIhg3UNjTC5bVqJHNzqRgpHkn6szXzXD6er4Svq2ZEwtadXYhEULALkQgKdiESQcEuRCIo2IVIhGV3482sC8ATAErt3/++u3/BzHYBeBDABgDPAPiMu0f2lwFvOpbILmIp8raz1AjvthYyvhtc55vI8Bw/Wa6b74IfIwkvuUhyR73Gd5hjikGlUqG2+Uh7ohx5bmyXHgB6i3zXtzuSQJPLcf+LXeHzdffw9a1WeSLM2UmeSNIEn5cvhNdjaKCXzhkdLlPb5s08EWZqntf5m506T21z01PB8fIwP9fZM2eD4/VIMtFK7uxLAD7q7h9Aqz3zHWb2IQB/C+Cr7n4tgPMAPruCYwkh1ollg91bvJMnWGj/cwAfBfD99vgDAD6+Fg4KIS4PK+3PnrU7uE4A+DGANwFMuf+yRelxAFvXxEMhxGVhRcHu7g133wNgG4BbAFy/0hOY2T1mtt/M9tcWeItlIcTaclG78e4+BeCfAHwYQNnsl429twE4Qebsc/e97r630DOwGl+FEKtg2WA3s41mVm4/7gbwMQAH0Qr6P23/2t0AHlkjH4UQl4GVJMKMAXjAzDK03hwecve/N7NXADxoZv8ZwHMAvrncgZrNJpYWw5JSKTM6r4d42azxJJNI1yI0wSWjWCJBk7SbqlcjCRwN/rxiLYhitmYkEYZJb+fPc+lnMrKOA31cohqM1GMbILXwusClvEaTS1d5iyTrlPiLvVQJH7OU569L7Fz1hemIjfs/N3WO2pokWaerxCXRCquTZ5HnRS1t3P0AgJsC44fR+vtdCPErgL5BJ0QiKNiFSAQFuxCJoGAXIhEU7EIkgsUknst+MrMzAI61fxwBEE7d6Szy493Ij3fzq+bHTnffGDJ0NNjfdWKz/e4ebp4mP+SH/LjsfuhjvBCJoGAXIhHWM9j3reO5L0R+vBv58W5+bfxYt7/ZhRCdRR/jhUiEdQl2M7vDzF4zs0Nmdt96+ND246iZvWhmz5vZ/g6e934zmzCzly4YGzazH5vZG+3/h9bJjy+a2Yn2mjxvZnd2wI/tZvZPZvaKmb1sZn/dHu/omkT86OiamFmXmT1lZi+0/fhP7fFdZvZkO26+Z2a84moId+/oPwAZWmWtrgZQBPACgBs67Ufbl6MARtbhvLcBuBnASxeM/RcA97Uf3wfgb9fJjy8C+PcdXo8xADe3H/cDeB3ADZ1ek4gfHV0TAAagr/24AOBJAB8C8BCAT7XH/zuAv7yY467Hnf0WAIfc/bC3Sk8/COCudfBj3XD3JwC8tzbyXWgV7gQ6VMCT+NFx3H3c3Z9tP55FqzjKVnR4TSJ+dBRvcdmLvK5HsG8F8PYFP69nsUoH8CMze8bM7lknH95h1N3H249PARhdR1/uNbMD7Y/5a/7nxIWY2VVo1U94Euu4Ju/xA+jwmqxFkdfUN+hudfebAfwhgL8ys9vW2yGg9c6O1hvRevB1ANeg1SNgHMCXO3ViM+sD8AMAn3P3d1Un7eSaBPzo+Jr4Koq8MtYj2E8A2H7Bz7RY5Vrj7ifa/08AeBjrW3nntJmNAUD7/4n1cMLdT7cvtCaAb6BDa2JmBbQC7Dvu/sP2cMfXJOTHeq1J+9xTuMgir4z1CPanAexu7ywWAXwKwKOddsLMes2s/53HAP4AwEvxWWvKo2gV7gTWsYDnO8HV5hPowJqYmaFVw/Cgu3/lAlNH14T50ek1WbMir53aYXzPbuOdaO10vgngP6yTD1ejpQS8AODlTvoB4LtofRysofW312fR6pn3OIA3APwEwPA6+fFtAC8COIBWsI11wI9b0fqIfgDA8+1/d3Z6TSJ+dHRNAPwWWkVc
D6D1xvIfL7hmnwJwCMD/BlC6mOPqG3RCJELqG3RCJIOCXYhEULALkQgKdiESQcEuRCIo2IVIBAW7EImgYBciEf4vt7E0CllzrOkAAAAASUVORK5CYII=\n",
       "text/plain": [
        "<Figure size 432x288 with 1 Axes>"
       ]
@@ -639,109 +733,147 @@
     "import numpy as np\n",
     "\n",
     "fn = pk.resource_filename(\"finn\", \"data/cifar10/cifar10-test-data-class3.npz\")\n",
-    "x = np.load(fn)[\"arr_0\"].astype(np.float32)\n",
-    "x = x / 255\n",
-    "plt.imshow(x.reshape(3, 32,32).transpose(1, 2, 0))"
+    "x = np.load(fn)[\"arr_0\"]\n",
+    "x = x.reshape(3, 32,32).transpose(1, 2, 0)\n",
+    "plt.imshow(x)"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Recall that we partitioned our original network into a parent graph that contained the non-synthesizable nodes and a child graph that contained the bulk of the network, which we turned into a bitfile. We'll load up the parent graph, modify the `StreamingDataflowPartition` node so that it points to the deployed ONNX graph."
+    "Recall that we partitioned our original network into a parent graph that contained the non-synthesizable nodes and a child graph that contained the bulk of the network, which we turned into a bitfile. The only operator left outside the FPGA partition was a `Transpose` to convert NCHW images into NHWC ones. Thus, we can skip the execution in the parent as long as we ensure our image has the expected data layout, which we have done above."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 20,
+   "execution_count": 43,
    "metadata": {},
    "outputs": [],
    "source": [
-    "# point to the PYNQ-deployed model as the StreamingDataflowPartition in the parent\n",
-    "parent_model = ModelWrapper(build_dir+\"/end2end_cnv_w1a1_dataflow_parent.onnx\")\n",
-    "sdp_node = parent_model.get_nodes_by_op_type(\"StreamingDataflowPartition\")[0]\n",
-    "sdp_node = getCustomOp(sdp_node)\n",
-    "sdp_node.set_nodeattr(\"model\", build_dir + \"/end2end_cnv_w1a1_pynq_deploy.onnx\")\n",
-    "parent_model.save(build_dir+\"/end2end_cnv_w1a1_dataflow_parent_with_remote_bitfile_exec.onnx\")"
+    "import numpy as np\n",
+    "from finn.core.onnx_exec import execute_onnx\n",
+    "\n",
+    "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_pynq_deploy.onnx\")\n",
+    "iname = model.graph.input[0].name\n",
+    "oname = model.graph.output[0].name\n",
+    "ishape = model.get_tensor_shape(iname)\n",
+    "input_dict = {iname: x.astype(np.float32).reshape(ishape)}\n",
+    "ret = execute_onnx(model, input_dict, True)"
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": 44,
    "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "array([[3.]], dtype=float32)"
+      ]
+     },
+     "execution_count": 44,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
-    "Finally, we can call `execute_onnx` on the parent graph, which will internally call remote execution with the bitfile once the `StreamingDataflowPartition` node is reached, grab the results, then continue executing the last portion of the network. "
+    "ret[oname]"
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 27,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [],
    "source": [
-    "import numpy as np\n",
-    "from finn.core.onnx_exec import execute_onnx\n",
-    "iname = parent_model.graph.input[0].name\n",
-    "oname = parent_model.graph.output[0].name\n",
-    "ishape = parent_model.get_tensor_shape(iname)\n",
-    "input_dict = {iname: x.reshape(ishape)}\n",
-    "ret = execute_onnx(parent_model, input_dict, True)"
+    "We see that the network correctly predicts this as a class 3 (\"cat\"). "
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "We'll pass the output of the network through a softmax function to interpret it as probabilities, and plot the per-class probabilities as a bar chart."
+    "### Validating the Accuracy on a PYNQ Board <a id='validation'></a>\n",
+    "\n",
+    "All the command line prompts here are meant to be executed with `sudo` on the PYNQ board, so we'll use a workaround (`sshpass` and `echo password | sudo -S command`) to get that working from this notebook running on the host computer.\n",
+    "\n",
+    "**Ensure that your PYNQ board has a working internet connecting for the next steps, since some there is some downloading involved.**\n",
+    "\n",
+    "To validate the accuracy, we first need to install the [`dataset-loading`](https://github.com/fbcotter/dataset_loading) Python package to the PYNQ board. This will give us a convenient way of downloading and accessing the MNIST dataset.\n",
+    "\n",
+    "\n",
+    "Command to execute on PYNQ:\n",
+    "\n",
+    "```pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading```"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 28,
+   "execution_count": 45,
    "metadata": {},
    "outputs": [
     {
-     "data": {
-      "text/plain": [
-       "<BarContainer object of 10 artists>"
-      ]
-     },
-     "execution_count": 28,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAABIEAAADCCAYAAADetdIQAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAWR0lEQVR4nO3de9RlZX0f8O9PEDVotJFJVwroEELUUeNtQI0xMWpaDBXsCqxAjfGWEBJRrNWGLF3U0mRV44rWRhJFazReFl4S26miuIr1moAMCnKxKCIKxJohUbygIvLrH3sPHCbvzHtm5rzzvjP78/lnzt77Ofs8735m7/2c7977OdXdAQAAAGDfdpfVrgAAAAAAK08IBAAAADABQiAAAACACRACAQAAAEyAEAgAAABgAoRAAAAAABOw/2p98EEHHdTr169frY8HAAAA2OdcfPHFN3b3uqWWrVoItH79+mzevHm1Ph4AAABgn1NVX9neMo+DAQAAAEyAEAgAAABgAoRAAAAAABMgBAIAAACYACEQAAAAwASs2q+DAbB2rT/9A6tdhX3Gta84ZrWrAAAASdwJBAAAADAJQiAAAACACRACAQAAAEyAEAgAAABgAoRAAAAAABMgBAIAAACYACEQAAAAwAQIgQAAAAAmQAgEAAAAMAFCIAAAAIAJEAIBAAAATIAQCAAAAGAChEAAAAAAEyAEAgAAAJgAIRAAAADABAiBAAAAACZACAQAAAAwAXOFQFV1dFVdVVVXV9XpOyj3a1XVVbVxcVUEAAAAYHctGwJV1X5JzkrylCQbkpxUVRuWKHevJKcluXDRlQQAAABg98xzJ9BRSa7u7mu6+5Yk5yQ5boly/znJK5N8f4H1AwAAAGAB5gmBDk5y3cz09eO821XVI5Mc2t0f2NGKqurkqtpcVZu3bNmy05UFAAAAYNfs9sDQVXWXJK9O8u+XK9vdZ3f3xu7euG7dut39aAAAAADmNE8IdEOSQ2emDxnnbXWvJA9J8tGqujbJY5JsMjg0AAAAwNoxTwh0UZIjquqwqjogyYlJNm1d2N03dfdB3b2+u9cnuSDJsd29eUVqDAAAAMBOWzYE6u5bk5ya5Lwkn0/y7u6+oqrOrKpjV7qCAAAAAOy+/ecp1N3nJjl3m3lnbKfsE3a/WgAAAAAs0m4PDA0AAADA2icEAgAAAJgAIRAAAADABAiBAAAAACZACAQAAAAwAUIgAAAAgAkQAgEAAABMgBAIAAAAYAKEQAAAAAATIAQCAAAAmAAhEAAAAMAECIEAAAAAJkAIBAAAADABQiAAAACACRACAQAAAEyAEAgAAABgAoRAAAAAABMgBAIAAACYACEQAAAAwAQIgQAAAAAmQAgEAAAAMAFCIAAAAIAJEAIBAAAATIAQCAAAAGAC5gqBquroqrqqqq6uqtOXWH5KVV1WVZdU1SerasPiqwoAAADArlo2BKqq/ZKcleQpSTYkOWmJkOed3f3Q7n54kj9O8uqF1xQAAACAXTbPnUBHJbm6u6/p7luSnJPkuNkC3f2tmckDk/TiqggAAADA7tp/jjIHJ7luZvr6JI/etlBVPS/Ji5IckOSJC6kdAAAAAAuxsIGhu/us7j48ye8nedlSZarq5KraXFWbt2zZsqiPBgAAAGAZ84RANyQ5dGb6kHHe9pyT5GlLLejus7t7Y3dvXLdu3fy1BAAAAGC3zBMCXZTkiKo6rKoOSHJikk2zBarqiJnJY5J8cXFVBAAAAGB3LTsmUHffWlWnJjkvyX5J3tzdV1TVmUk2d/emJKdW1ZOT/DDJN5I8cyUrDQAAAMDOmWdg6HT3uUnO3WbeGTOvT1twvQAAAABYoIUNDA0AAADA2iUEAgAAAJgAIRAAAADABAiBAAAAACZACAQAAAAwAUIgAAAAgAkQAgEAAABMgBAIAAAAYAKEQAAAAAATIAQCAAAAmAAhEAAAAMAECIEAAAAAJkAIBAAAADABQiAAAACACRACAQAAAEyAEAgAAABgAoRAAAAAABMgBAIAAACYACEQAAAAwAQIgQAAAAAmQAgEAAAAMAFCIAAAAIAJEAIBAAAATIAQCAAAAGAChEAAAAAAEzBXCFRVR1fVVVV1dVWdvsTyF1XVlVX1uao6v6ruv/iqAgAAALCrlg2Bqmq/JGcleUqSDUlOqqoN2xT7bJKN3f1zSd6b5I8XXVEAAAAAdt08dwIdleTq7r6mu29Jck6S42YLdPf/6e6bx8kLkhyy2GoCAAAAsDvmCYEOTnLdzPT147zteW6SD+5OpQAAAABYrP0XubKq+o0kG5P80naWn5zk5CS53/3ut8iPBgAAAGAH5rkT6IYkh85MHzLOu5OqenKSlyY5trt/sNSKuvvs7t7Y3RvXrVu3K/UFAAAAYBfMEwJdlOSIqjqsqg5IcmKSTbMFquoRSd6QIQD6+8VXEwAAAIDdsWwI1N23Jjk1yXlJPp/k3d19RVWdWVXHjsVeleSeSd5TVZdU1abtrA4AAACAVTDXmEDdfW6Sc7eZd8bM6ycvuF4AAAAALNA8j4MBAAAAsJcTAgEAAABMgBAIAAAAYAKEQAAAAAATIAQCAAAAmAAhEAAAAMAECIEAAAAAJkAIBAAAADABQiAAAACACRACAQAAAEyAEAgAAABgAoRAAAAAABMgBAIAAACYACEQAAAAwAQIgQAAAAAmQAgEAAAAMAFCIAAAAIAJEAIBAAAATIAQCAAAAGAChEAAAAAAEyAEAgAAAJgAIRAAAADABAiBAAAAACZACAQAAAAwAUIgAAAAgAmYKwSqqqOr6qqqurqqTl9i+S9W1Weq6taqOn7x1QQAAABgdywbAlXVfknOSvKUJBuSnFRVG7Yp9tUkz0ryzkVXEAAAAIDdt/8cZY5KcnV3X5MkVXVOkuOSXLm1QHdfOy67bQXqCAAAAMBumudxsIOTXDczff04b6dV1clVtbmqNm/ZsmVXVgEAAADALtijA0N399ndvbG7N65bt25PfjQAAADApM0TAt2Q5NCZ6UPGeQAAAADsJeYJgS5KckRVHVZVByQ5Mcmmla0WAAAAAIu0bAjU3bcmOTXJeUk+n+Td3X1FVZ1ZVccmSVUdWVXXJzkhyRuq6oqVrDQAAAAAO2eeXwdLd5+b5Nxt5p0x8/qiDI+JAQAAALAG7dGBoQEAAABYHUIgAAAAgAkQAgEAAABMwFxjAgEAsLz1p39gtauwz7j2FcesdhUAYJ/jTiAAAACACRACAQAAAEyAEAgAAABgAowJBADAJBizaXGM2TQt9p3Fse+w2oRAwKrRoVgcHYppse8sjn0H1gbHtcVxXAN2RAi0AE5ai+OkBQAAMD/fRxdjKt9FhUDs0xwQF2cqB0UAAIB9lYGhAQAAACZACAQAAAAwAUIgAAAAgAkQAgEAAABMgBAIAAAAYAKEQAAAAAATIAQCAAAAmAAhEAAAAMAECIEAAAAAJkAIBAAAADABQiAAAACACRACAQAAAEyAEAgAAABgAuYKgarq6Kq6qqqurqrTl1
h+t6p617j8wqpav+iKAgAAALDrlg2Bqmq/JGcleUqSDUlOqqoN2xR7bpJvdPfPJHlNklcuuqIAAAAA7Lp57gQ6KsnV3X1Nd9+S5Jwkx21T5rgkbx1fvzfJk6qqFldNAAAAAHbHPCHQwUmum5m+fpy3ZJnuvjXJTUnuu4gKAgAAALD7qrt3XKDq+CRHd/dvjdPPSPLo7j51pszlY5nrx+kvjWVu3GZdJyc5eZx8QJKrFvWHsKyDkty4bClWi/ZZu7TN2qZ91i5ts7Zpn7VL26xt2mft0jZrm/bZs+7f3euWWrD/HG++IcmhM9OHjPOWKnN9Ve2f5N5J/mHbFXX32UnOnqfGLFZVbe7ujatdD5amfdYubbO2aZ+1S9usbdpn7dI2a5v2Wbu0zdqmfdaOeR4HuyjJEVV1WFUdkOTEJJu2KbMpyTPH18cn+Ugvd4sRAAAAAHvMsncCdfetVXVqkvOS7Jfkzd19RVWdmWRzd29K8t+TvK2qrk7yjxmCIgAAAADWiHkeB0t3n5vk3G3mnTHz+vtJTlhs1Vgwj+Gtbdpn7dI2a5v2Wbu0zdqmfdYubbO2aZ+1S9usbdpnjVh2YGgAAAAA9n7zjAkEAAAAwF5OCLSXqapzq+o+O/met1TV8StVJ5KqelpVbVjhz1hfVZdvZ9mbtn5+VV1bVQetZF32FdvbprPbc5n3P6uqXrcytWNXVNUTqurnV7seU1FVL6+qF692Pdgx7bS2VNULqurzVfWO1a7L1O2ob8Xat70+b1UdW1Wnr0adpqKq7lNVv7egdT2hqt6/iHUxHyHQXqa7f7W7vzk7rwbacnU9LcmKhkA70t2/1d1Xrtbn72u2tz2rar/VqA875QlJhEB7kaqaa3xC2If8XpJf6e6nb51hP9j7aLO1q7s3dfcrVrse+7j7ZDiW3Yn9Yu8gOFjDqup/VNXFVXVFVZ08zru2qg4ar1xcVVV/meTyJIdW1Xeq6jVj+fOrat0S6zyjqi6qqsur6uyqqnH+R6vqlVX16ar6QlU9fpy/X1W9anzP56rqd/bkNlhN29n+35lZfvx4l9XPJzk2yauq6pKqOryqHl5VF4zb7H1V9c/G93x0bKPN41XAI6vqr6vqi1X1hzPrftHYRpdX1QtnqrV/Vb1jfO97q+rHZta7cYm/4TfGNr2kqt4gxFjSP9mms9tz3K/+pKouTfLYqnr2uI98OsnjVrfq01FVvznuT5dW1duq6qlVdWFVfbaq/ndV/fOqWp/klCT/bvw///jVrfW+qapeOu4Dn0zygHHe4VX1ofGY+YmqeuA4f11V/dV4Drmoqh43zn/52I6fSvK21ftr9l3baaftnZuOHOddMp7z3RmxQqrq9Ul+OskHq+qm2f1g7Nt9ZGyL86vqfuN7Dh/b7bKq+sPZvggLsV9VvXHs7324qu6xTD/uv1bV5iSnVdUJY1/t0qr6+Fhmsn3nlVRVB1bVB8ZtfXlV/fq46PlV9Zlx/9h67rn9Tu0a+uqvH/veX6iqf71qf8S+5RVJDh/PGxeN5/5NSa6sbe6wq6oXV9XLx9c/M/bbLh3b7fDZlY7no89uO5/FEgKtbc/p7kcl2ZjkBVV1322WH5Hkz7r7wd39lSQHJtnc3Q9O8rEk/3GJdb6uu4/s7ockuUeS2QPh/t19VJIXzrz3uUlu6u4jkxyZ5Ler6rBF/YFr3HLbP0nS3X+TZFOSl3T3w7v7S0n+Msnvd/fPJbksd26LW7p7Y5LXJ/mfSZ6X5CFJnlVV962qRyV5dpJHJ3lMhm3+iPG9D8jQ5g9K8q0skcBvVVUPSvLrSR7X3Q9P8qMkT99e+QlbbpsemOTC7n5Yki8l+U8Zwp9fyCre/TUlVfXgJC9L8sSxHU5L8skkj+nuRyQ5J8l/6O5rM+xXrxn3xU+sVp33VePx6cQkD0/yqxnOC8nwix/PH4+ZL07yZ+P812ZojyOT/FqSN82sbkOSJ3f3SXui7lOyg3ba3rnpL5L8zsy5ghXS3ack+bskv5zkNbnzfvCnSd46ts87kvy38W2vTfLa7n5okuv3fK33eUckOWvsP38zw7FqR/24A7p7Y3f/SZIzkvyr8dx07Lh8yn3nlXR0kr/r7oeN32M+NM6/sbsfmeTPM5x/lrI+yVFJjkny+qq6+0pXdgJOT/Kl8bzxkiSPTHJad//sMu97R4b97WEZ7tz+2tYFNVxYf32S48bvU6wQIdDa9oLx7oMLkhya4SQ16yvdfcHM9G1J3jW+fnuGL6nb+uXx6vllSZ6Y5MEzy/56/PfiDAfLJPmXSX6zqi5JcmGS+y5Rj33Vctt/SVV17yT36e6PjbPemuQXZ4psGv+9LMkV3f217v5BkmvGz/mFJO/r7u9293cytMvWOxqu6+5Pja+318ZbPSnJo5JcNLbfkzJcfeTOltumP0ryV+PrRyf5aHdv6e5bcsf+xsp6YpL3dPeNSdLd/5jkkCTnjceyl+TOxzJWzuMzHJ9u7u5vZTie3T1DR+4947HmDUl+aiz/5CSvG+dvSvLjVXXPcdmm7v7enq3+ZCzVTgdmiXNTDeMM3qu7/3ac/849X91Jm90PHps7tv/bcsf56LFJ3jO+1j6L9+XuvmR8fXGSw7Pjftzsuf9TSd5SVb+dZOvd1lPuO6+ky5L8Sg1PLjy+u28a5y/1/WVb7+7u27r7ixn62w9c2apO0qe7+8s7KlBV90pycHe/L0m6+/vdffO4+EEZLig9tbu/urJVxTN7a1RVPSFD5/mx3X1zVX00Q0d71neXWU1vs867Z7g6u7G7rxtvy5td5w/Gf3+UO/5vVIaru+ft7N+wN9vB9p/dprt6FWHrdr5t5vXW6eX2yV5melZluKL4BztXvclZbpt+v7tdGV97/jTJq7t707i/vnx1qzNpd0nyzfFq4FLLHtPd35+dWcOTyMudw2AK7Aerb7Yv9qMMY53syO1t1t2nVNWjM9xhcvF4F94k+84rrbu/UFWPzHB34x9W1fnjoqW+v/yTty8zze6bPZbdmjvfbDLPd6avjeUekeFuSVaQO4HWrnsn+cYYQDwww2NBy7lLkq2/AvZvMzwuMWvrDnjjeCV2nl8MOy/J71bVXZOkqn62qg6c4317u+1t/69X1YNqGIj738yU/3aSeyXJeGXiG3XHeCTPyPB43rw+keRpNYxNc+D4OVsfa7lfVT12fL1UG886P8nxVfWTSVJVP1FV99+JekzFzmzTC5P80vjY3l2TnLDitSNJPpLkhK2PZFbVT2TYR28Ylz9zpuzt+yIr4uMZjk/3GK/oPTXJzUm+XFUnJLf/WMHDxvIfTvL8rW+uqqWCIhZvqXb6bpY4N40/NvHt8YtsMjxGxur4m9yx/Z+eO879F2R4RCnRPnvC3P24qjq8uy/s7jOSbMlwR/dU+84rqqr+RZKbu/vtSV6V4fGjeZ1QVXcZx5n56SRXrUQdJ2ZH/a2vJ/nJsb98t4zDj3T3t5NcX1VPS5KquluN45tmeBTzmCT/Zby4xwpyJ9Da9aEkp1TV5zMcqC5YpnwydPCOqqqXJfn7DOPB3K67v1lVb8wwk
PT/S3LRHOt8U4ZbKz9Tw6XbLRl+CWtft73tf3qS92fYDpuTbH2s4Zwkb6yqF2QI156Z4ZnjH8tw2+mz5/3g7v5MVb0lyafHWW/q7s/WMOjtVUmeV1VvTnJlhueft7eeK8f/Cx8eQ6sfZhh/6Cvz1mUiltqmT12qYHd/bbyD7m8znKwuWaoci9XdV1TVHyX5WFX9KMlnM9z5856q+kaGkGjreAv/K8l7q+q4DFdijQu0QOPx6V1JLs1wntl6Hnl6kj8fjzl3zXBMvDTJC5KcVVWfy9Dn+HiGwbtZQTtop+2dm56b4Rx2W4YvuzeF1fD8JH9RVS/J0M/Y2j4vTPL2qnpphv6J9ll58/bjXlVVR2S4++f8DPvc5zLNvvNKe2iG7X1bhj7t7yZ575zv/WqGfvWPJzll27tT2Xnd/Q9V9akaBoD+XobgZ+uyH1bVmRm2+Q1J/u/MW5+R5A3j8h9m5oJqd3+9hoG7P1hVz+nuC/fE3zJF1e1uuH1FVX2nu++5fEkAgEFV3XMcgy5VdXqSn+ru01a5WozGIOJ73d1VdWKSk7r7uNWuF+wNxgur7+/ueQMj2Oe5EwgAYNqOqao/yNAv/EqSZ61uddjGozIMsF4Z7kJ9zirXB4C9mDuBAAAAACbAwNAAAAAAEyAEAgAAAJgAIRAAAADABAiBAAAAACZACAQAAAAwAUIgAAAAgAn4/ybos9quolFvAAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<Figure size 1440x216 with 1 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[sudo] password for xilinx: Requirement already satisfied: dataset_loading from git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading in /usr/local/lib/python3.6/dist-packages\n",
+      "Requirement already satisfied: Pillow in /usr/lib/python3/dist-packages (from dataset_loading)\n",
+      "Requirement already satisfied: scipy in /usr/lib/python3/dist-packages (from dataset_loading)\n",
+      "Connection to 192.168.2.99 closed.\n"
+     ]
     }
    ],
    "source": [
-    "def softmax(x):\n",
-    "    \"\"\"Compute softmax values for each sets of scores in x.\"\"\"\n",
-    "    e_x = np.exp(x - np.max(x))\n",
-    "    return e_x / e_x.sum()\n",
-    "\n",
-    "logits = ret[oname].flatten()\n",
-    "prob = softmax(logits)\n",
+    "! sshpass -p {password} ssh -t {username}@{ip} -p {port} 'echo {password} | sudo -S pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We can now use the `validate.py` script that was generated together with the driver to measure top-1 accuracy on the CIFAR-10 dataset.\n",
     "\n",
-    "classes = [\"airplane\", \"automobile\", \"bird\", \"cat\", \"deer\", \"dog\", \"frog\", \"horse\", \"ship\", \"truck\"]\n",
+    "Command to execute on PYNQ:\n",
     "\n",
-    "plt.figure(figsize=(20, 3)) \n",
-    "plt.bar(classes, prob)"
+    "`python3.6 validate.py --dataset cifar10 --batchsize 1000`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 46,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[sudo] password for xilinx: Tar File found in dest_dir. Not Downloading again\n",
+      "Extracting Python CIFAR10 data.\n",
+      "Files extracted\n",
+      "batch 0 / 10 : total OK 851 NOK 149\n",
+      "batch 1 / 10 : total OK 1683 NOK 317\n",
+      "batch 2 / 10 : total OK 2522 NOK 478\n",
+      "batch 3 / 10 : total OK 3370 NOK 630\n",
+      "batch 4 / 10 : total OK 4207 NOK 793\n",
+      "batch 5 / 10 : total OK 5044 NOK 956\n",
+      "batch 6 / 10 : total OK 5887 NOK 1113\n",
+      "batch 7 / 10 : total OK 6728 NOK 1272\n",
+      "batch 8 / 10 : total OK 7570 NOK 1430\n",
+      "batch 9 / 10 : total OK 8419 NOK 1581\n",
+      "Final accuracy: 84.190000\n",
+      "Connection to 192.168.2.99 closed.\n"
+     ]
+    }
+   ],
+   "source": [
+    "! sshpass -p {password} ssh -t {username}@{ip} -p {port} 'cd {target_dir_pynq}; echo {password} | sudo -S python3.6 validate.py --dataset cifar10 --batchsize 1000'"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "We see that the network correctly predicts this as a class 3 (\"cat\") with high probability. This concludes our tutorial on how to take a convolutional BNN all the way down to hardware with FINN, and execute it remotely on a PYNQ board."
+    "We see that the final top-1 accuracy is 84.19%, which is very close to the 84.22% reported on the [BNN-PYNQ accuracy table in Brevitas](https://github.com/Xilinx/brevitas/tree/master/brevitas_examples/bnn_pynq). "
    ]
   },
   {
diff --git a/notebooks/end2end_example/finn-design-flow-example.svg b/notebooks/end2end_example/finn-design-flow-example.svg
index 04f568c1f77cdbdc0dfbc8cd50b76f4c5d45d954..fa36be96c52dad740cef18bca9b81903e1fba3db 100755
--- a/notebooks/end2end_example/finn-design-flow-example.svg
+++ b/notebooks/end2end_example/finn-design-flow-example.svg
@@ -1 +1 @@
-[SVG vector graphic omitted: FINN end-to-end design-flow diagram. The raw path data carries no readable text beyond the diagram's labels, rendered as glyph outlines: the stage headings "Brevitas Export", "Network Preparation", "Vivado HLS & IPI", and "PYNQ HW Gen & Deployment", and step boxes such as "Brevitas FINN-ONNX Export", "Streamlining", and "Synthesis, P&R".]
-0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.3073578 3.8125l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm12.177521 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm3.2873993 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561493 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm8.151108 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561493 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.06248474 0.359375 0.06248474 1.25l0 4.671875l-1.2812347 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916733 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 
2.0625zm13.958099 3.9375l0 -9.25l-3.453125 0l0 -1.234375l8.3125 0l0 1.234375l-3.46875 0l0 9.25l-1.390625 0zm5.5646057 0l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm9.849396 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.307373 3.8125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.6354675 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.15625 2.265625l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.275116 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 
2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.2917175 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8962708 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.161896 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7717896 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 
-0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.3073425 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.635498 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#6d9eeb" d="m132.88327 260.97858l0 0c0 -3.797943 3.0788422 -6.8767853 6.8767853 -6.8767853l278.5929 0c1.823822 0 3.5729675 0.7245178 4.86261 2.0141754c1.2896423 1.2896423 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.797943 -3.0788574 6.87677 -6.87677 6.87677l-278.5929 0c-3.797943 0 -6.8767853 -3.078827 -6.8767853 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m159.4187 254.39838l239.2756 0l0 21.070862l-239.2756 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m213.53659 273.80652l1.390625 0.34375q-0.4375 1.703125 -1.578125 2.609375q-1.125 0.890625 -2.765625 0.890625q-1.6875 0 -2.75 -0.6875q-1.0625 -0.6875 -1.625 -2.0q-0.546875 -1.3125 -0.546875 -2.8125q0 -1.640625 0.625 -2.859375q0.625 -1.21875 1.78125 -1.84375q1.15625 -0.640625 2.546875 -0.640625q1.5625 0 2.640625 0.8125q1.078125 0.796875 1.5 2.25l-1.375 0.3125q-0.359375 -1.140625 -1.0625 -1.65625q-0.6875 -0.53125 -1.734375 -0.53125q-1.21875 0 -2.03125 0.578125q-0.8125 0.578125 -1.140625 1.5625q-0.328125 0.96875 -0.328125 2.015625q0 1.328125 0.390625 2.328125q0.390625 1.0 1.21875 1.5q0.828125 0.484375 1.78125 0.484375q1.171875 0 1.96875 -0.671875q0.8125 -0.671875 1.09375 -1.984375zm2.4592743 -0.125q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307358 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 
4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm10.260483 0l-2.890625 -7.59375l1.359375 0l1.625 4.546875q0.265625 0.734375 0.5 1.53125q0.15625 -0.609375 0.46875 -1.453125l1.6875 -4.625l1.328125 0l-2.875 7.59375l-1.203125 0zm10.421875 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166733 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm7.7087708 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm8.143951 0l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm0.77511597 -2.640625q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm11.5824585 3.796875l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.490509 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 
1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.26944 3.375l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm12.994873 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.2448425 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.3806458 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 
-0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#6d9eeb" d="m132.88327 359.1895l0 0c0 -3.797943 3.0788422 -6.87677 6.8767853 -6.87677l278.5929 0c1.823822 0 3.5729675 0.7244873 4.86261 2.0141602c1.2896423 1.2896423 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.797943 -3.0788574 6.87677 -6.87677 6.87677l-278.5929 0c-3.797943 0 -6.8767853 -3.078827 -6.8767853 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m159.4187 352.6093l239.2756 0l0 21.070862l-239.2756 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m212.97243 375.68933l0 -10.484375l3.625 0q1.21875 0 1.859375 0.140625q0.90625 0.203125 1.546875 0.75q0.828125 0.703125 1.234375 1.796875q0.40625 1.09375 0.40625 2.5q0 1.1875 -0.28125 2.109375q-0.265625 0.921875 -0.703125 1.53125q-0.4375 0.609375 -0.96875 0.953125q-0.515625 0.34375 -1.25 0.53125q-0.71875 0.171875 -1.671875 0.171875l-3.796875 0zm1.390625 -1.234375l2.25 0q1.03125 0 1.625 -0.1875q0.59375 -0.203125 0.9375 -0.546875q0.5 -0.5 0.765625 -1.328125q0.28125 -0.84375 0.28125 -2.03125q0 -1.640625 -0.546875 -2.515625q-0.53125 -0.890625 -1.3125 -1.1875q-0.546875 -0.21875 -1.796875 -0.21875l-2.203125 0l0 8.015625zm13.990524 0.296875q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119858 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.2126007 0.21875q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 
-0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.6198578 3.8125l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.7282257 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm2.8030243 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625153 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84376526 0.46875 -1.8437653 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.64064026 -0.734375 0.64064026 -2.234375q0 -1.40625 -0.64064026 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm8.713608 3.796875l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm13.40625 0l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm13.181854 4.5625q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 
0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.291748 3.8125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm7.7087708 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686646 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7717896 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.3073425 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm8.151123 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.256134 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 
-1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625z" fill-rule="nonzero"/><path fill="#6d9eeb" d="m132.87679 457.40045l0 0c0 -3.797943 3.0788422 -6.87677 6.87677 -6.87677l278.5929 0c1.823822 0 3.5729675 0.7245178 4.86261 2.0141602c1.2896423 1.2896423 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.797943 -3.078827 6.87677 -6.87677 6.87677l-278.5929 0c-3.7979279 0 -6.87677 -3.078827 -6.87677 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m137.01575 450.82126l288.2205 0l0 21.070862l-288.2205 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m152.81091 473.90125l4.015625 -10.484375l1.5 0l4.296875 10.484375l-1.578125 0l-1.234375 -3.171875l-4.375 0l-1.15625 3.171875l-1.46875 0zm3.015625 -4.3125l3.5625 0l-1.09375 -2.90625q-0.5 -1.3125 -0.75 -2.171875q-0.203125 1.015625 -0.5625 2.0l-1.15625 3.078125zm12.666229 4.3125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.276108 -5.203125l0 -1.484375l1.296875 0l0 1.484375l-1.296875 0zm-1.625 11.953125l0.25 -1.09375q0.375 0.09375 0.59375 0.09375q0.40625 0 0.59375 -0.265625q0.1875 -0.25 0.1875 -1.296875l0 -7.984375l1.296875 0l0 8.015625q0 1.390625 -0.375 1.953125q-0.453125 0.703125 -1.53125 0.703125q-0.53125 0 -1.015625 -0.125zm9.881149 -2.953125l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm2.6511078 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 
0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm10.65625 1.109375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm5.6439514 1.15625l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.2751007 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.276108 3.796875l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.209274 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.291733 -5.21875l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561493 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916733 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 
-1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm14.2074585 2.78125l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm0.7751007 -2.640625q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm11.3793335 3.796875l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.161896 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm2.447998 3.8125l2.78125 -3.953125l-2.578125 -3.640625l1.609375 0l1.171875 1.78125q0.328125 0.5 0.53125 0.84375q0.3125 -0.46875 0.578125 -0.828125l1.28125 -1.796875l1.53125 0l-2.625 3.578125l2.828125 4.015625l-1.578125 0l-1.5625 -2.359375l-0.40625 -0.640625l-2.0 3.0l-1.5625 0zm8.1875 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.256134 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 
-1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm12.208771 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.5686646 0l0 -1.046875l4.84375 -5.546875q-0.828125 0.03125 -1.453125 0.03125l-3.09375 0l0 -1.03125l6.203125 0l0 0.84375l-4.109375 4.828125l-0.796875 0.875q0.859375 -0.0625 1.625 -0.0625l3.515625 0l0 1.109375l-6.734375 0zm13.21875 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm11.2543335 7.4375l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm12.197968 1.296875l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm5.2087708 0l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.2750854 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 
-1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8962708 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.161896 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.3073425 3.8125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm13.104218 -2.78125l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 
[SVG figure data omitted: raw vector path data for a flow-diagram image (colored rounded boxes with text rendered as glyph outlines); no readable labels or caption are recoverable from the path coordinates.]
-4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm12.17749 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm3.2874146 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm13.354187 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.104248 4.53125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm11.3637085 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm13.354248 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 
0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657104 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.693665 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291687 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm7.831421 0l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm8.151123 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.0217896 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 
0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm7.322998 3.9375l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm7.651062 -3.15625l0 -1.28125l3.953125 0l0 1.28125l-3.953125 0zm5.3494263 3.15625l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.49054 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.291687 4.53125l-2.890625 -7.59375l1.359375 0l1.625 4.546875q0.265625 0.734375 0.5 1.53125q0.15625 -0.609375 0.46875 -1.453125l1.6875 -4.625l1.328125 0l-2.875 7.59375l-1.203125 0zm10.421875 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.151123 4.53125l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm7.09375 -5.109375q0 -2.609375 1.40625 -4.078125q1.40625 -1.484375 3.625 -1.484375q1.453125 0 2.609375 0.703125q1.171875 0.6875 1.78125 1.921875q0.609375 1.234375 0.609375 2.8125q0 1.59375 -0.640625 2.859375q-0.640625 1.265625 -1.828125 1.90625q-1.171875 0.640625 -2.546875 0.640625q-1.46875 0 -2.640625 -0.703125q-1.171875 -0.71875 -1.78125 -1.953125q-0.59375 -1.25 -0.59375 -2.625zm1.4375 0.015625q0 1.90625 1.015625 3.0q1.015625 1.078125 2.5625 1.078125q1.5625 0 2.578125 -1.09375q1.015625 -1.109375 1.015625 -3.125q0 -1.28125 -0.4375 -2.234375q-0.4375 -0.96875 -1.265625 -1.484375q-0.828125 -0.53125 -1.875 -0.53125q-1.46875 0 -2.53125 1.015625q-1.0625 1.015625 -1.0625 3.375zm10.368835 5.09375l0 -10.484375l1.437561 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.500061 -8.25l0 8.25l-1.34375 0zm10.58429 0l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm9.537415 0l4.0625 -5.46875l-3.578125 -5.015625l1.65625 0l1.890625 2.6875q0.59375 0.828125 0.84375 1.28125q0.359375 -0.5625 0.84375 -1.1875l2.109375 -2.78125l1.5 0l-3.6875 4.9375l3.984375 5.546875l-1.71875 0l-2.640625 -3.75q-0.21875 -0.3125 -0.46875 -0.6875q-0.34375 0.5625 -0.5 0.78125l-2.625 3.65625l-1.671875 0zm14.722595 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 
0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.3806763 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 
-0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 300.96063l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 300.96063l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path fill="#000000" d="m484.35364 324.04062l0 -10.484375l2.078125 0l2.484375 7.421875q0.34375 1.03125 0.5 1.546875q0.1875 -0.5625 0.5625 -1.671875l2.515625 -7.296875l1.859375 0l0 10.484375l-1.328125 0l0 -8.78125l-3.046875 8.78125l-1.265625 0l-3.03125 -8.9375l0 8.9375l-1.328125 0zm12.083771 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.396759 0l2.78125 -3.953125l-2.578125 -3.640625l1.609375 0l1.171875 1.78125q0.328125 0.5 0.53125 0.84375q0.3125 -0.46875 0.578125 -0.828125l1.28125 -1.796875l1.53125 0l-2.625 3.578125l2.828125 4.015625l-1.578125 0l-1.5625 -2.359375l-0.40625 -0.640625l-2.0 3.0l-1.5625 0zm11.0 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.2438354 1.15625l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.5780945 -0.265625 -0.8749695 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.2812195 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.151123 0l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099426 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm10.7699585 0.734375q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619812 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 
-0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034607 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729187 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm19.11322 2.4375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.307373 3.8125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm13.072998 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 
0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm11.3637085 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.666687 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.651123 -3.15625l0 -1.28125l3.953125 0l0 1.28125l-3.953125 0zm5.58374 3.15625l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128845 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 
1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51706 354.9895l239.02365 0l0 35.905518l-239.02365 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51706 354.9895l239.02365 0l0 35.905518l-239.02365 0z" fill-rule="evenodd"/><path fill="#000000" d="m488.73734 378.0695l0 -10.484375l3.96875 0q1.046875 0 1.59375 
0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm13.181854 4.5625q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.291748 3.8125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099365 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.6562195 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q0.9999695 -1.0625 2.5780945 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.182373 4.53125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm10.963623 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm10.409546 -2.953125l0 -1.234375l4.4375 -0.015625l0 3.90625q-1.03125 0.8125 -2.125 1.21875q-1.078125 0.40625 -2.21875 0.40625q-1.546875 0 -2.8125 -0.65625q-1.265625 -0.65625 -1.90625 -1.90625q-0.640625 
-1.265625 -0.640625 -2.8125q0 -1.53125 0.640625 -2.84375q0.640625 -1.328125 1.84375 -1.96875q1.203125 -0.65625 2.765625 -0.65625q1.140625 0 2.0625 0.375q0.921875 0.359375 1.4375 1.03125q0.53125 0.65625 0.796875 1.703125l-1.25 0.34375q-0.234375 -0.796875 -0.59375 -1.25q-0.34375 -0.46875 -1.0 -0.734375q-0.65625 -0.28125 -1.4375 -0.28125q-0.953125 0 -1.65625 0.296875q-0.6875 0.28125 -1.125 0.765625q-0.421875 0.46875 -0.65625 1.03125q-0.390625 0.96875 -0.390625 2.109375q0 1.40625 0.46875 2.359375q0.484375 0.9375 1.40625 1.390625q0.9375 0.453125 1.96875 0.453125q0.90625 0 1.765625 -0.34375q0.859375 -0.34375 1.296875 -0.734375l0 -1.953125l-3.078125 0zm6.3063965 4.109375l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm9.849365 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.307373 6.71875l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.994873 3.75l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm14.6762085 3.078125q-1.0625 -1.34375 -1.796875 -3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm2.4274902 -3.078125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 
2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.666748 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.651123 -3.15625l0 -1.28125l3.953125 0l0 1.28125l-3.953125 0zm5.58374 3.15625l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128784 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 
2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.6875 5.34375l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 405.15485l292.34647 0l0 35.905518l-292.34647 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 405.15485l292.34647 0l0 35.905518l-292.34647 0z" fill-rule="evenodd"/><path fill="#000000" d="m484.4452 428.23486l0 
-10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.646759 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657104 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.3281555 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.2031555 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.693634 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 
4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128784 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 
-0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.171875 2.265625l0 -1.46875l1.46875 0l0 1.46875q0 0.8125 -0.28125 1.296875q-0.28125 0.5 -0.90625 0.78125l-0.359375 -0.546875q0.40625 -0.1875 0.59375 -0.53125q0.203125 -0.34375 0.21875 -1.0l-0.734375 0zm7.815857 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.161865 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm2.447998 3.8125l2.78125 -3.953125l-2.578125 -3.640625l1.609375 0l1.171875 1.78125q0.328125 0.5 0.53125 0.84375q0.3125 -0.46875 
0.578125 -0.828125l1.28125 -1.796875l1.53125 0l-2.625 3.578125l2.828125 4.015625l-1.578125 0l-1.5625 -2.359375l-0.40625 -0.640625l-2.0 3.0l-1.5625 0zm8.1875 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.193115 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.166748 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm16.593262 0l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.2750854 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.276123 3.796875l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.20929 0l0 -0.953125q-0.71881104 1.125 -2.125061 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53131104 0.28125 0.87506104 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.062561 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.437561 -0.6875q0.59375 -0.6875 0.59375 
-2.109375q0 -1.5625 -0.609375 -2.28125q-0.59381104 -0.734375 -1.484436 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.291748 -5.21875l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 502.98163l277.0079 0l0 35.905518l-277.0079 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 502.98163l277.0079 0l0 35.905518l-277.0079 0z" fill-rule="evenodd"/><path fill="#000000" d="m484.49762 526.06165l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.64679 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994843 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.665741 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.693665 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 
0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291687 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128845 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 
-0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.171875 2.265625l0 -1.46875l1.46875 0l0 1.46875q0 0.8125 -0.28125 1.296875q-0.28125 0.5 -0.90625 0.78125l-0.359375 -0.546875q0.40625 -0.1875 0.59375 -0.53125q0.203125 -0.34375 0.21875 -1.0l-0.734375 0zm12.737732 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 
-1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm12.494873 1.34375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm6.666687 2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2405396 0l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099426 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.104187 4.53125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 
0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm11.6762085 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.2751465 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.276062 3.796875l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.20929 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.291748 -5.21875l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916687 0.625l1.250061 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.85943604 -1.140625 -0.85943604 -2.71875q0 -1.09375 0.390625 -2.0q0.40631104 -0.921875 1.140686 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.82818604 -0.609375 -0.79693604 -1.84375zm1.062561 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 600.33594l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 600.33594l258.01578 0l0 35.905518l-258.01578 0z" 
fill-rule="evenodd"/><path fill="#000000" d="m485.3283 623.41595l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.646759 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657104 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.3281555 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.5312805 5.84375l-1.34375 0zm8.693634 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 
-4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128784 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 
-1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.171875 2.265625l0 -1.46875l1.46875 0l0 1.46875q0 0.8125 -0.28125 1.296875q-0.28125 0.5 -0.90625 0.78125l-0.359375 -0.546875q0.40625 -0.1875 0.59375 -0.53125q0.203125 -0.34375 0.21875 -1.0l-0.734375 0zm8.206482 0l0 -10.484375l1.390625 0l0 10.484375l-1.390625 0zm3.8375854 0l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm12.035706 8.40625l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm12.197998 1.296875l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 
-0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm8.937012 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 699.07874l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 699.07874l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path fill="#000000" d="m488.45197 722.15875l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.646759 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 
0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.665741 1.15625l-2.3281555 -7.59375l1.328125 0l1.2031555 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.6936035 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 
-0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128784 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 
-0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.171875 2.265625l0 -1.46875l1.46875 0l0 1.46875q0 0.8125 -0.28125 1.296875q-0.28125 0.5 -0.90625 0.78125l-0.359375 -0.546875q0.40625 -0.1875 0.59375 -0.53125q0.203125 -0.34375 0.21875 -1.0l-0.734375 0zm7.300232 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm10.65625 1.109375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686646 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.2125854 -1.625l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 
-0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm2.375 2.78125l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm13.354248 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.104248 4.53125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm11.7543335 3.796875l0 -10.484375l1.390625 0l0 10.484375l-1.390625 0zm3.8375854 0l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75z" fill-rule="nonzero"/><path fill="#ffffff" d="m188.08005 817.6982l181.95276 0l0 58.04724l-181.95276 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m188.08005 817.6982l181.95276 0l0 58.04724l-181.95276 0z" fill-rule="evenodd"/><path fill="#000000" d="m200.95192 840.77814l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.646774 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994858 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 
-1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657257 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.693649 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291733 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8962708 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.915741 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.6198425 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034576 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.7292175 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 
-7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0z" fill-rule="nonzero"/><path fill="#ffffff" d="m939.8362 689.3096q-1.0625 -1.34375 -1.796875 -3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm1.9119263 -5.34375l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm10.65625 1.109375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686646 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.2125854 -1.625l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 
0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm2.375 2.78125l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm13.354248 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.104248 4.53125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm8.135437 6.875l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625z" fill-rule="nonzero"/><path fill="#e06666" d="m939.88336 822.0936l0 0c0 -3.7979126 3.0787964 -6.87677 6.87677 -6.87677l278.5929 0c1.8238525 0 3.572998 0.7244873 4.862671 2.0141602c1.2895508 1.2896118 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.7979126 -3.0788574 6.87677 -6.876831 6.87677l-278.5929 0c-3.7979736 0 -6.87677 -3.0788574 -6.87677 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m947.6798 815.51447l277.0078 0l0 24.944824l-277.0078 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m973.3106 838.5944l0 -10.484375l7.59375 0l0 1.234375l-6.203125 0l0 3.203125l5.796875 0l0 1.234375l-5.796875 0l0 3.578125l6.4375 0l0 1.234375l-7.828125 0zm9.588135 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.193115 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 
-1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.135498 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7717896 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm14.676147 3.078125q-1.0625 -1.34375 -1.796875 -3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm2.4118652 -3.078125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 
0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm7.7088623 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2281494 1.15625l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm2.7718506 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561035 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm13.05249 3.078125l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625zm13.093262 -3.078125l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm2.651123 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 
-0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561035 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm11.55127 3.9375l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm8.16626 8.421875l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm10.546875 -2.921875l-4.0625 -10.484375l1.5 0l2.734375 7.609375q0.328125 0.921875 0.546875 1.71875q0.25 -0.859375 0.5625 -1.71875l2.84375 -7.609375l1.40625 0l-4.109375 10.484375l-1.421875 0zm11.013184 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 
0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.89624 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2248535 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.240601 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm0.7750244 -2.640625q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0z" fill-rule="nonzero"/><path fill="#e06666" d="m939.8833 545.404l0 0c0 -3.7979736 3.0787964 -6.876831 6.87677 -6.876831l278.59283 0c1.8238525 0 3.572998 0.72454834 4.862671 2.0141602c1.2896729 1.2896729 2.0141602 3.0388184 2.0141602 4.862671l0 27.506287c0 3.7979126 -3.0788574 6.87677 -6.876831 6.87677l-278.59283 
0c-3.7979736 0 -6.87677 -3.0788574 -6.87677 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m966.4187 538.5236l239.27563 0l0 32.97638l-239.27563 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m986.92786 558.22864l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm10.088135 -5.640625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.193115 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.135498 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 
0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2593994 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7718506 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm14.676147 3.078125q-1.0625 -1.34375 -1.796875 -3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm7.380615 -5.859375l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm2.375 5.6875l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.994873 6.65625l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 
0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.479248 1.484375l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561035 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm13.052612 3.078125l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625zm13.093262 -3.078125l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm2.651001 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 
1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2562256 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm19.035522 0.265625l1.390625 0.34375q-0.4375 1.703125 -1.578125 2.609375q-1.125 0.890625 -2.765625 0.890625q-1.6875 0 -2.75 -0.6875q-1.0625 -0.6875 -1.625 -2.0q-0.546875 -1.3125 -0.546875 -2.8125q0 -1.640625 0.625 -2.859375q0.625 -1.21875 1.78125 -1.84375q1.15625 -0.640625 2.546875 -0.640625q1.5625 0 2.640625 0.8125q1.078125 0.796875 1.5 2.25l-1.375 0.3125q-0.359375 -1.140625 -1.0625 -1.65625q-0.6875 -0.53125 -1.734375 -0.53125q-1.21875 0 -2.03125 0.578125q-0.8125 0.578125 -1.140625 1.5625q-0.328125 0.96875 -0.328125 2.015625q0 1.328125 0.390625 2.328125q0.390625 1.0 1.21875 1.5q0.828125 0.484375 1.78125 0.484375q1.171875 0 1.96875 -0.671875q0.8125 -0.671875 1.09375 -1.984375zm5.6468506 1.96875l0 -2.875l-2.859375 0l0 -1.203125l2.859375 0l0 -2.84375l1.21875 0l0 2.84375l2.859375 0l0 1.203125l-2.859375 0l0 2.875l-1.21875 0zm8.55896 0l0 -2.875l-2.859375 0l0 -1.203125l2.859375 0l0 -2.84375l1.21875 0l0 2.84375l2.859375 0l0 1.203125l-2.859375 0l0 2.875l-1.21875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m279.025 12.011797l0.03149414 45.66929" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.02502 12.011797l0.02734375 39.66929" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.40063 51.682228l1.6548767 4.536957l1.6485901 -4.539234z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 98.93976l0 57.259842" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" 
stroke-linecap="butt" d="m279.05652 98.93976l0 51.259842" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 150.1996l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 196.99619l0 57.259842" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 196.99619l0 51.259842" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 248.25603l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.04074 295.2075l0 57.259827" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.04077 295.20746l0 51.259857" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.38904 346.46732l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 393.41876l0 57.259857" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 393.41876l0 51.259827" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 444.6786l1.6517334 4.5381165l1.6517334 -4.5381165z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.04074 491.62872l0 57.259827" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.04077 491.62872l0 51.259827" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.38904 542.88855l1.6517334 4.538147l1.6517334 -4.538147z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 589.8413l0 57.259888" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 589.8413l0 51.259888" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 641.1012l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 687.89777l0 57.259827" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 687.89777l0 51.259888" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 739.15765l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m160.92908 785.225l0 122.17322" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m160.92908 785.225l0 116.17322" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m159.27734 901.3982l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m401.9462 785.6417l0 121.3858" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m401.9462 785.6417l0 115.385864" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m400.29446 901.0276l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m161.2598 
956.2971l-0.6614227 66.42523" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m161.2598 956.2972l-0.6016846 60.425537" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m159.00647 1016.70624l1.6064606 4.5543213l1.6968384 -4.5214844z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m401.94745 954.78796l-0.6614075 66.42517" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m401.94748 954.78796l-0.6016846 60.425537" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m399.69415 1015.197l1.6064453 4.5543213l1.6968384 -4.5214233z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m158.72704 988.7546l29.354324 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m162.51703 988.7546l21.774338 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m159.22704 988.7546c0 -0.9085083 0.736496 -1.6449585 1.6450043 -1.6449585c0.9085083 0 1.644989 0.7364502 1.644989 1.6449585c0 0.9085083 -0.7364807 1.6450195 -1.644989 1.6450195c-0.9085083 0 -1.6450043 -0.73651123 -1.6450043 -1.6450195z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m187.58136 988.7546c0 0.9085083 -0.7364807 1.6450195 -1.644989 1.6450195c-0.9085083 0 -1.6450043 -0.73651123 -1.6450043 -1.6450195c0 -0.9085083 0.736496 -1.6449585 1.6450043 -1.6449585c0.9085083 0 1.644989 0.7364502 1.644989 1.6449585z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m158.73753 846.3123l29.354324 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m162.52753 846.3123l21.774338 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m159.23753 846.3123c0 -0.9085083 0.736496 -1.6449585 1.6450043 -1.6449585c0.9085083 0 1.644989 0.7364502 1.644989 1.6449585c0 0.9085083 -0.7364807 1.6450195 -1.644989 1.6450195c-0.9085083 0 -1.6450043 -0.73651123 -1.6450043 -1.6450195z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m187.59186 846.3123c0 0.9085083 -0.7364807 1.6450195 -1.644989 1.6450195c-0.9085083 0 -1.6450043 -0.73651123 -1.6450043 -1.6450195c0 -0.9085083 0.736496 -1.6449585 1.6450043 -1.6449585c0.9085083 0 1.644989 0.7364502 1.644989 1.6449585z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m370.0328 846.7218l33.984253 0.03149414" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m373.82278 846.72534l26.404297 0.024414062" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m370.5328 846.7222c8.544922E-4 -0.9085083 0.7380066 -1.6442871 1.6465149 -1.6434326c0.9085083 7.9345703E-4 1.6443176 0.7379761 1.6434937 1.6464844c-8.544922E-4 0.9085083 -0.7380371 1.6443481 -1.6465454 1.6434937c-0.9085083 -8.544922E-4 -1.6443176 -0.7380371 -1.6434631 -1.6465454z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m403.51706 846.7528c-8.544922E-4 0.9085083 -0.7380066 1.6443481 -1.6465149 1.6434937c-0.9085083 
-8.544922E-4 -1.6443176 -0.7380371 -1.6434937 -1.6465454c8.544922E-4 -0.9085083 0.7380371 -1.6442871 1.6465454 -1.6434326c0.9085083 7.9345703E-4 1.6443176 0.7379761 1.6434631 1.6464844z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m399.25198 990.2651l70.2677 -0.7558594" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m403.04175 990.2243l62.68817 -0.67437744" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m399.75195 990.2597c-0.009796143 -0.90844727 0.71875 -1.652832 1.6271973 -1.6625977c0.90844727 -0.009765625 1.652832 0.71875 1.6625977 1.6271973c0.009765625 0.90844727 -0.71875 1.652832 -1.6271973 1.6625977c-0.9084778 0.009765625 -1.652832 -0.71875 -1.6625977 -1.6271973z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.0197 989.5146c0.009765625 0.90844727 -0.71875 1.652832 -1.6271973 1.6625977c-0.9084778 0.009765625 -1.652832 -0.71875 -1.6625977 -1.6272583c-0.009796143 -0.90844727 0.71875 -1.652771 1.6271973 -1.6625977c0.90844727 -0.009765625 1.652832 0.71875 1.6625977 1.6272583z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5118 715.9921l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.30176 716.0126l185.42798 0.9984741" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0118 715.9948c0.0048828125 -0.9085083 0.7453308 -1.6409912 1.6538391 -1.6361084c0.9084778 0.0048828125 1.6409912 0.7453613 1.6361084 1.6538086c-0.0048828125 0.9085083 -0.7453308 1.6410522 -1.6538391 1.6361694c-0.9084778 -0.0049438477 -1.6410217 -0.7453613 -1.6361084 -1.6538696z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01968 717.0288c-0.0048828125 0.9085083 -0.7453308 1.6409912 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6409912 -0.7453613 -1.6361084 -1.6538086c0.0048828125 -0.9085083 0.7453308 -1.6410522 1.6538391 -1.6361694c0.9085083 0.0049438477 1.6410217 0.7453613 1.6361084 1.6538696z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 617.9488l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 617.96924l185.42798 0.99853516" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 617.95154c0.0048828125 -0.9085083 0.7453308 -1.6410522 1.6538391 -1.6361694c0.9084778 0.0049438477 1.6409912 0.7453613 1.6361084 1.6538696c-0.0048828125 0.9085083 -0.7453308 1.6409912 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453613 -1.6361084 -1.6538086z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 618.9855c-0.0048828125 0.9085083 -0.7453308 1.6410522 -1.6538391 1.6361694c-0.9084778 -0.0049438477 -1.6409912 -0.7453613 -1.6361084 -1.6538696c0.0048828125 -0.9085083 0.7453308 -1.6409912 1.6538391 -1.6361084c0.9085083 0.0048828125 1.6410217 0.7453613 1.6361084 1.6538086z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 519.7402l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" 
stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 519.76056l185.42798 0.99853516" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 519.74286c0.0048828125 -0.9085083 0.7453308 -1.6409912 1.6538391 -1.6361084c0.9084778 0.0048828125 1.6409912 0.7453003 1.6361084 1.6538086c-0.0048828125 0.9085083 -0.7453308 1.6409912 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453003 -1.6361084 -1.6538086z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 520.77686c-0.0048828125 0.90844727 -0.7453308 1.6409912 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6409912 -0.7453613 -1.6361084 -1.6538696c0.0048828125 -0.90844727 0.7453308 -1.6409912 1.6538391 -1.6361084c0.9085083 0.0048828125 1.6410217 0.7453613 1.6361084 1.6538696z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 420.19028l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 420.21072l185.42798 0.99850464" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 420.193c0.0048828125 -0.9085083 0.7453308 -1.6410217 1.6538391 -1.6361389c0.9084778 0.00491333 1.6409912 0.7453613 1.6361084 1.6538391c-0.0048828125 0.9085083 -0.7453308 1.6410217 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453308 -1.6361084 -1.6538086z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 421.22696c-0.0048828125 0.9085083 -0.7453308 1.6410217 -1.6538391 1.6361389c-0.9084778 -0.00491333 -1.6409912 -0.7453613 -1.6361084 -1.6538391c0.0048828125 -0.9085083 0.7453308 -1.6410217 1.6538391 -1.6361389c0.9085083 0.00491333 1.6410217 0.7453613 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 317.64172l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 317.66214l185.42798 0.9985657" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 317.64444c0.0048828125 -0.9085083 0.7453308 -1.6410217 1.6538391 -1.6361389c0.9084778 0.00491333 1.6409912 0.7453308 1.6361084 1.6538391c-0.0048828125 0.9085083 -0.7453308 1.6410217 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453308 -1.6361084 -1.6538086z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 318.6784c-0.0048828125 0.9085083 -0.7453308 1.6410217 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6409912 -0.7453308 -1.6361084 -1.6538086c0.0048828125 -0.9085083 0.7453308 -1.6410217 1.6538391 -1.6361389c0.9085083 0.00491333 1.6410217 0.7453613 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 225.1063l193.00787 1.0393829" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 225.12671l185.42798 0.9985504" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 225.109c0.0048828125 -0.90849304 0.7453308 -1.6410065 1.6538391 -1.6361237c0.9084778 0.0048980713 1.6409912 0.74534607 1.6361084 1.6538391c-0.0048828125 
0.90849304 -0.7453308 1.6410065 -1.6538391 1.6361237c-0.9084778 -0.0048980713 -1.6410217 -0.74534607 -1.6361084 -1.6538391z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 226.14297c-0.0048828125 0.90849304 -0.7453308 1.6410065 -1.6538391 1.6361237c-0.9084778 -0.0048980713 -1.6409912 -0.74534607 -1.6361084 -1.6538391c0.0048828125 -0.90849304 0.7453308 -1.6410065 1.6538391 -1.6361237c0.9085083 0.0048980713 1.6410217 0.74534607 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 130.81102l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 130.83144l185.42798 0.9985504" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 130.81372c0.0048828125 -0.90849304 0.7453308 -1.6410217 1.6538391 -1.6361237c0.9084778 0.0048980713 1.6409912 0.74534607 1.6361084 1.6538391c-0.0048828125 0.90849304 -0.7453308 1.6410065 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453308 -1.6361084 -1.6538239z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 131.8477c-0.0048828125 0.90849304 -0.7453308 1.6410065 -1.6538391 1.6361237c-0.9084778 -0.0048980713 -1.6409912 -0.74534607 -1.6361084 -1.6538391c0.0048828125 -0.90849304 0.7453308 -1.6410065 1.6538391 -1.6361237c0.9085083 0.0048980713 1.6410217 0.74534607 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 37.1916l193.00787 1.0393715" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 37.212013l185.42798 0.9985466" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 37.194294c0.0048828125 -0.90849686 0.7453308 -1.6410103 1.6538391 -1.6361198c0.9084778 0.0048942566 1.6409912 0.74534225 1.6361084 1.6538353c-0.0048828125 0.90849686 -0.7453308 1.6410103 -1.6538391 1.6361198c-0.9084778 -0.0048942566 -1.6410217 -0.74534225 -1.6361084 -1.6538353z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 38.22828c-0.0048828125 0.90849304 -0.7453308 1.6410103 -1.6538391 1.636116c-0.9084778 -0.004890442 -1.6409912 -0.74533844 -1.6361084 -1.6538315c0.0048828125 -0.90849686 0.7453308 -1.6410103 1.6538391 -1.6361198c0.9085083 0.0048942566 1.6410217 0.74533844 1.6361084 1.6538353z" fill-rule="nonzero"/><path fill="#ffffff" d="m1275.6536 486.28873l174.01575 0l0 49.637787l-174.01575 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m1275.6536 486.28873l174.01575 0l0 49.637787l-174.01575 0z" fill-rule="evenodd"/><path fill="#000000" d="m1290.8724 509.3687l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.6467285 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 
0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657715 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.6936035 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.89624 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.9157715 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.4904785 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 
-0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128906 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 
-0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#000000" d="m1303.9425 527.3687l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm9.1779785 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686035 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2595215 1.15625l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm19.86377 -3.671875l1.390625 0.34375q-0.4375 1.703125 -1.578125 2.609375q-1.125 0.890625 -2.765625 0.890625q-1.6875 0 -2.75 -0.6875q-1.0625 -0.6875 -1.625 -2.0q-0.546875 -1.3125 -0.546875 -2.8125q0 -1.640625 0.625 -2.859375q0.625 -1.21875 1.78125 -1.84375q1.15625 -0.640625 2.546875 -0.640625q1.5625 0 2.640625 0.8125q1.078125 0.796875 1.5 2.25l-1.375 0.3125q-0.359375 -1.140625 -1.0625 -1.65625q-0.6875 -0.53125 -1.734375 -0.53125q-1.21875 0 -2.03125 0.578125q-0.8125 0.578125 -1.140625 1.5625q-0.328125 0.96875 -0.328125 2.015625q0 1.328125 0.390625 2.328125q0.390625 1.0 1.21875 1.5q0.828125 0.484375 1.78125 0.484375q1.171875 0 1.96875 -0.671875q0.8125 -0.671875 1.09375 -1.984375zm5.6467285 1.96875l0 -2.875l-2.859375 0l0 -1.203125l2.859375 0l0 -2.84375l1.21875 0l0 2.84375l2.859375 0l0 1.203125l-2.859375 0l0 2.875l-1.21875 0zm8.55896 0l0 -2.875l-2.859375 0l0 -1.203125l2.859375 0l0 -2.84375l1.21875 0l0 2.84375l2.859375 0l0 1.203125l-2.859375 0l0 2.875l-1.21875 0zm11.3342285 1.703125l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm9.1623535 0l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 
-0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm9.849365 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.307373 6.71875l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.994873 6.65625l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm12.197998 1.296875l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 
-2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#ffffff" d="m1275.6536 725.4934l174.01575 0l0 49.637817l-174.01575 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m1275.6536 725.4934l174.01575 0l0 49.637817l-174.01575 0z" fill-rule="evenodd"/><path fill="#000000" d="m1290.8724 748.5734l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.6467285 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657715 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.6936035 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 
1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.89624 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.9157715 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.4904785 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128906 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 
0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#000000" d="m1297.4269 766.5734l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm9.1779785 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0687256 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 
0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2593994 1.15625l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm15.379395 0l-4.0625 -10.484375l1.5 0l2.734375 7.609375q0.328125 0.921875 0.546875 1.71875q0.25 -0.859375 0.5625 -1.71875l2.84375 -7.609375l1.40625 0l-4.109375 10.484375l-1.421875 0zm11.013184 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.89624 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2249756 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm0.7751465 -2.640625q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 
1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm8.96814 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm11.724487 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm12.229248 3.796875l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm12.494751 1.34375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.151123 4.53125l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm2.7718506 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 
-1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#ffffff" d="m996.65356 725.85565l121.35437 0l0 48.91339l-121.35437 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m996.65356 725.85565l121.35437 0l0 48.91339l-121.35437 0z" fill-rule="evenodd"/><path fill="#000000" d="m1019.03064 748.93567l0 -10.484375l7.078125 0l0 1.234375l-5.6875 0l0 3.25l4.921875 0l0 1.234375l-4.921875 0l0 4.765625l-1.390625 0zm13.702637 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.135498 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm3.2561035 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm7.359375 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm13.354248 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657715 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.6936035 -3.796875q0 -2.109375 1.171875 
-3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.89624 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0z" fill-rule="nonzero"/><path fill="#000000" d="m1012.578 766.93567l-4.0625 -10.484375l1.5 0l2.734375 7.609375q0.328125 0.921875 0.546875 1.71875q0.25 -0.859375 0.5625 -1.71875l2.84375 -7.609375l1.40625 0l-4.109375 10.484375l-1.421875 0zm11.013184 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.89624 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2249756 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 
1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm0.7751465 -2.640625q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291626 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm8.968262 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm11.724487 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm12.229126 3.796875l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm12.494873 1.34375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 
-0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.151123 4.53125l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m425.22974 372.94266l44.283447 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m425.22974 372.94266l38.283478 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m463.5132 374.5944l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m1086.0565 482.4765l0 57.259827" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m1086.0565 482.4765l0 51.259827" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m1084.4048 533.7363l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m1082.6456 509.52756l193.00793 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m1086.4357 509.54797l185.42798 0.99853516" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m1083.1456 509.53024c0.0048828125 -0.9084778 0.7453613 -1.6409912 1.6538086 -1.6361084c0.90856934 0.0048828125 1.6411133 0.7453308 1.6362305 1.6538391c-0.005004883 0.9084778 -0.7453613 1.6409912 -1.6539307 1.6361084c-0.90844727 -0.0048828125 -1.6409912 -0.7453308 -1.6361084 -1.6538391z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m1275.1536 510.56424c-0.0048828125 0.9084778 -0.7453613 1.6410217 -1.6538086 1.6361389c-0.90856934 -0.0049438477 -1.6409912 -0.7453613 -1.6361084 -1.6538696c0.0048828125 -0.9084778 0.74523926 -1.6409912 1.6538086 -1.6361084c0.90844727 0.0048828125 1.6409912 0.7453308 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m966.54724 694.4238l0 121.385864" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m966.54724 694.4239l0 115.3858" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m964.8955 809.8097l1.6517334 4.538147l1.6517334 -4.538147z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m1206.5657 694.4238l0 121.385864" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m1206.5657 694.4239l0 115.3858" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m1204.914 809.8097l1.6517334 4.538147l1.6517334 -4.538147z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m962.66797 750.2966l33.984253 0.03149414" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m966.458 750.3001l26.404175 0.024475098" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m963.16797 750.29706c8.544922E-4 -0.9085083 0.7380371 -1.6443481 1.6465454 -1.6434937c0.9085083 8.544922E-4 1.6442871 0.7380371 1.6434326 1.6465454c-7.9345703E-4 0.9085083 -0.7379761 1.6442871 -1.6464844 
1.6434937c-0.9085083 -8.544922E-4 -1.6443481 -0.7380371 -1.6434937 -1.6465454z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m996.1522 750.32764c-8.544922E-4 0.9085083 -0.7379761 1.6442871 -1.6464844 1.6434326c-0.9085083 -7.9345703E-4 -1.6443481 -0.7379761 -1.6434937 -1.6464844c8.544922E-4 -0.9085083 0.7379761 -1.6443481 1.6464844 -1.6434937c0.9085083 8.544922E-4 1.6443481 0.7380371 1.6434937 1.6465454z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m1202.8976 750.6903l72.75598 -0.37799072" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m1206.6876 750.6706l65.176025 -0.338562" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m1203.3976 750.6877c-0.004638672 -0.9085083 0.72802734 -1.6488037 1.6364746 -1.6535034c0.90844727 -0.004760742 1.6488037 0.7279053 1.6535645 1.6364136c0.004638672 0.9085083 -0.72802734 1.6488037 -1.6364746 1.6535034c-0.90844727 0.004760742 -1.6488037 -0.7279053 -1.6535645 -1.6364136z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m1275.1536 750.31494c0.004760742 0.9085083 -0.7279053 1.6488037 -1.6364746 1.6535034c-0.90844727 0.004760742 -1.6488037 -0.7279053 -1.6534424 -1.6364136c-0.004760742 -0.9085083 0.7279053 -1.6488037 1.6363525 -1.6535034c0.90856934 -0.004760742 1.6488037 0.7279053 1.6535645 1.6364136z" fill-rule="nonzero"/></g></svg>
+<svg version="1.1" viewBox="0.0 0.0 1459.1627296587926 1074.1811023622047" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg"><clipPath id="p.0"><path d="m0 0l1459.1627 0l0 1074.1812l-1459.1627 0l0 -1074.1812z" clip-rule="nonzero"/></clipPath><g clip-path="url(#p.0)"><path fill="#000000" fill-opacity="0.0" d="m0 0l1459.1627 0l0 1074.1812l-1459.1627 0z" fill-rule="evenodd"/><path fill="#b6d7a8" d="m85.519684 609.74146l0 0c0 -36.312317 29.436989 -65.74933 65.749344 -65.74933l262.9895 0l0 0c17.437836 0 34.161438 6.927124 46.49182 19.257507c12.330383 12.330444 19.257538 29.054016 19.257538 46.49182l0 331.96588c0 36.312378 -29.436981 65.74933 -65.74936 65.74933l-262.9895 0c-36.312355 0 -65.749344 -29.43695 -65.749344 -65.74933z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m85.519684 609.74146l0 0c0 -36.312317 29.436989 -65.74933 65.749344 -65.74933l262.9895 0l0 0c17.437836 0 34.161438 6.927124 46.49182 19.257507c12.330383 12.330444 19.257538 29.054016 19.257538 46.49182l0 331.96588c0 36.312378 -29.436981 65.74933 -65.74936 65.74933l-262.9895 0c-36.312355 0 -65.749344 -29.43695 -65.749344 -65.74933z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m823.8117 436.36414l0 0c-5.5324097 -67.94034 12.631531 -135.19711 46.7843 -173.23087c34.15271 -38.033737 78.304504 -40.174118 113.72015 -5.512909l0 0c12.545288 -39.50331 35.506897 -66.77763 61.93933 -73.573c26.432373 -6.7953644 53.23108 7.686371 72.28992 39.06476l0 0c10.68689 -35.816254 31.67102 -59.880203 55.50598 -63.652695c23.83496 -3.7725067 47.14746 13.28035 61.66516 45.1073l0 0c19.307617 -37.965134 50.02649 -53.950027 78.8645 -41.037933c28.83789 12.912079 50.615356 52.40204 55.909058 101.38234l0 0c23.65503 10.782257 43.359253 38.1922 54.021606 75.14795c10.6623535 36.95575 11.236938 79.83206 1.5753174 117.551056l0 0c23.293335 50.660706 28.742188 118.1655 14.313232 177.32278c-14.428955 59.157288 -46.568115 101.079346 -84.423584 110.12164c-0.26672363 55.52124 -18.488403 106.4668 -47.64148 133.20013c-29.153076 26.733337 -64.68518 25.079895 -92.900635 -4.323059c-12.018311 66.49628 -45.845947 115.423645 -86.86792 125.643616c-41.022095 10.21991 -81.88464 -20.09961 -104.93353 -77.859436c-28.253052 28.470032 -62.15442 36.671326 -94.05664 22.753845c-31.90216 -13.917419 -59.11786 -48.78131 -75.50775 -96.72699l0 0c-28.871155 5.645691 -56.785522 -19.350464 -69.88934 -62.582947c-13.103821 -43.232544 -8.607666 -95.49823 11.257141 -130.85791l0 0c-25.753784 -25.329956 -38.894897 -75.59302 -32.5708 -124.57892c6.3240967 -48.98593 30.680359 -85.59436 60.36798 -90.73541z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m795.4438 654.297l0 0c12.15332 11.953308 26.193237 17.37555 40.23462 15.538818m18.391296 177.90631c6.03833 -1.1807861 11.957031 -3.6812744 17.603455 -7.4369507m151.95251 81.40704c-4.2469482 -10.642822 -7.8027344 -22.015564 -10.6067505 -33.9245m202.4154 -13.861694l0 0c2.191162 -12.12323 3.6108398 -24.600708 4.2353516 -37.224182m136.30164 -91.648254c0.28405762 -59.1109 -19.807129 -113.23346 -51.6438 -139.12042m121.75342 -148.31775c-5.155884 20.128387 -13.026855 37.98398 -22.99585 52.16684m-32.596313 -244.87112l0 0c0.87854004 8.128296 1.2850342 16.378754 1.2141113 24.637024m-135.98535 -84.97896l0 0c-4.816284 9.470413 -8.78418 20.053482 -11.7803955 31.419678m-105.391846 -12.878418l0 0c-2.5666504 8.601898 -4.4832764 17.704193 -5.7056885 27.097183m-128.52551 7.4117737l0 
0c7.4939575 7.3342896 14.426819 16.161896 20.646423 26.28888m-181.1488 152.45584l0 0c0.7625122 9.363922 1.9672852 18.612427 3.6030884 27.658905" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m823.8117 436.36414l0 0c-5.5324097 -67.94034 12.631531 -135.19711 46.7843 -173.23087c34.15271 -38.033737 78.304504 -40.174118 113.72015 -5.512909l0 0c12.545288 -39.50331 35.506897 -66.77763 61.93933 -73.573c26.432373 -6.7953644 53.23108 7.686371 72.28992 39.06476l0 0c10.68689 -35.816254 31.67102 -59.880203 55.50598 -63.652695c23.83496 -3.7725067 47.14746 13.28035 61.66516 45.1073l0 0c19.307617 -37.965134 50.02649 -53.950027 78.8645 -41.037933c28.83789 12.912079 50.615356 52.40204 55.909058 101.38234l0 0c23.65503 10.782257 43.359253 38.1922 54.021606 75.14795c10.6623535 36.95575 11.236938 79.83206 1.5753174 117.551056l0 0c23.293335 50.660706 28.742188 118.1655 14.313232 177.32278c-14.428955 59.157288 -46.568115 101.079346 -84.423584 110.12164c-0.26672363 55.52124 -18.488403 106.4668 -47.64148 133.20013c-29.153076 26.733337 -64.68518 25.079895 -92.900635 -4.323059c-12.018311 66.49628 -45.845947 115.423645 -86.86792 125.643616c-41.022095 10.21991 -81.88464 -20.09961 -104.93353 -77.859436c-28.253052 28.470032 -62.15442 36.671326 -94.05664 22.753845c-31.90216 -13.917419 -59.11786 -48.78131 -75.50775 -96.72699l0 0c-28.871155 5.645691 -56.785522 -19.350464 -69.88934 -62.582947c-13.103821 -43.232544 -8.607666 -95.49823 11.257141 -130.85791l0 0c-25.753784 -25.329956 -38.894897 -75.59302 -32.5708 -124.57892c6.3240967 -48.98593 30.680359 -85.59436 60.36798 -90.73541z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m795.4438 654.297l0 0c12.15332 11.953308 26.193237 17.37555 40.23462 15.538818m18.391296 177.90631c6.03833 -1.1807861 11.957031 -3.6812744 17.603455 -7.4369507m151.95251 81.40704c-4.2469482 -10.642822 -7.8027344 -22.015564 -10.6067505 -33.9245m202.4154 -13.861694l0 0c2.191162 -12.12323 3.6108398 -24.600708 4.2353516 -37.224182m136.30164 -91.648254c0.28405762 -59.1109 -19.807129 -113.23346 -51.6438 -139.12042m121.75342 -148.31775c-5.155884 20.128387 -13.026855 37.98398 -22.99585 52.16684m-32.596313 -244.87112l0 0c0.87854004 8.128296 1.2850342 16.378754 1.2141113 24.637024m-135.98535 -84.97896l0 0c-4.816284 9.470413 -8.78418 20.053482 -11.7803955 31.419678m-105.391846 -12.878418l0 0c-2.5666504 8.601898 -4.4832764 17.704193 -5.7056885 27.097183m-128.52551 7.4117737l0 0c7.4939575 7.3342896 14.426819 16.161896 20.646423 26.28888m-181.1488 152.45584l0 0c0.7625122 9.363922 1.9672852 18.612427 3.6030884 27.658905" fill-rule="evenodd"/><path fill="#93c47d" d="m132.88327 64.56065l0 0c0 -3.7975044 3.0784912 -6.875988 6.875992 -6.875988l278.59448 0c1.8236084 0 3.5725403 0.7244339 4.86203 2.0139313c1.2895203 1.2894974 2.0139465 3.0384293 2.0139465 4.8620567l0 27.50312c0 3.7975006 -3.0784912 6.875984 -6.8759766 6.875984l-278.59448 0c-3.7975006 0 -6.875992 -3.0784836 -6.875992 -6.875984z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m159.4187 57.681087l239.2756 0l0 32.9745l-239.2756 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m187.70293 80.761086l0 -10.484375l3.9375 0q1.203125 0 1.921875 0.3125q0.734375 0.3125 1.140625 0.984375q0.40625 0.65625 0.40625 1.375q0 0.671875 -0.375 1.265625q-0.359375 0.59375 -1.09375 0.96875q0.953125 0.28125 1.46875 0.953125q0.515625 0.671875 0.515625 1.578125q0 0.75 -0.3125 1.390625q-0.3125 0.625 -0.78125 0.96875q-0.453125 0.34375 
0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.3073425 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.635498 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#6d9eeb" d="m132.88327 260.97858l0 0c0 -3.797943 3.0788422 -6.8767853 6.8767853 -6.8767853l278.5929 0c1.823822 0 3.5729675 0.7245178 4.86261 2.0141754c1.2896423 1.2896423 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.797943 -3.0788574 6.87677 -6.87677 6.87677l-278.5929 0c-3.797943 0 -6.8767853 -3.078827 -6.8767853 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m159.4187 254.39838l239.2756 0l0 21.070862l-239.2756 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m213.53659 273.80652l1.390625 0.34375q-0.4375 1.703125 -1.578125 2.609375q-1.125 0.890625 -2.765625 0.890625q-1.6875 0 -2.75 -0.6875q-1.0625 -0.6875 -1.625 -2.0q-0.546875 -1.3125 -0.546875 -2.8125q0 -1.640625 0.625 -2.859375q0.625 -1.21875 1.78125 -1.84375q1.15625 -0.640625 2.546875 -0.640625q1.5625 0 2.640625 0.8125q1.078125 0.796875 1.5 2.25l-1.375 0.3125q-0.359375 -1.140625 -1.0625 -1.65625q-0.6875 -0.53125 -1.734375 -0.53125q-1.21875 0 -2.03125 0.578125q-0.8125 0.578125 -1.140625 1.5625q-0.328125 0.96875 -0.328125 2.015625q0 1.328125 0.390625 2.328125q0.390625 1.0 1.21875 1.5q0.828125 0.484375 1.78125 0.484375q1.171875 0 1.96875 -0.671875q0.8125 -0.671875 1.09375 -1.984375zm2.4592743 -0.125q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307358 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 
-1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm10.260483 0l-2.890625 -7.59375l1.359375 0l1.625 4.546875q0.265625 0.734375 0.5 1.53125q0.15625 -0.609375 0.46875 -1.453125l1.6875 -4.625l1.328125 0l-2.875 7.59375l-1.203125 0zm10.421875 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166733 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm7.7087708 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm8.143951 0l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm0.77511597 -2.640625q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm11.5824585 3.796875l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.490509 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 
-1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.26944 3.375l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm12.994873 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.2448425 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.3806458 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 
-0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#6d9eeb" d="m132.88327 359.1895l0 0c0 -3.797943 3.0788422 -6.87677 6.8767853 -6.87677l278.5929 0c1.823822 0 3.5729675 0.7244873 4.86261 2.0141602c1.2896423 1.2896423 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.797943 -3.0788574 6.87677 -6.87677 6.87677l-278.5929 0c-3.797943 0 -6.8767853 -3.078827 -6.8767853 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m159.4187 352.6093l239.2756 0l0 21.070862l-239.2756 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m212.97243 375.68933l0 -10.484375l3.625 0q1.21875 0 1.859375 0.140625q0.90625 0.203125 1.546875 0.75q0.828125 0.703125 1.234375 1.796875q0.40625 1.09375 0.40625 2.5q0 1.1875 -0.28125 2.109375q-0.265625 0.921875 -0.703125 1.53125q-0.4375 0.609375 -0.96875 0.953125q-0.515625 0.34375 -1.25 0.53125q-0.71875 0.171875 -1.671875 0.171875l-3.796875 0zm1.390625 -1.234375l2.25 0q1.03125 0 1.625 -0.1875q0.59375 -0.203125 0.9375 -0.546875q0.5 -0.5 0.765625 -1.328125q0.28125 -0.84375 0.28125 -2.03125q0 -1.640625 -0.546875 -2.515625q-0.53125 -0.890625 -1.3125 -1.1875q-0.546875 -0.21875 -1.796875 -0.21875l-2.203125 0l0 8.015625zm13.990524 0.296875q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119858 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.2126007 
0.21875q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.6198578 3.8125l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.7282257 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm2.8030243 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625153 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84376526 0.46875 -1.8437653 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.64064026 -0.734375 0.64064026 -2.234375q0 -1.40625 -0.64064026 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm8.713608 3.796875l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm13.40625 0l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm13.181854 4.5625q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 
0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.291748 3.8125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm7.7087708 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686646 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7717896 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.3073425 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm8.151123 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.256134 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 
-0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625z" fill-rule="nonzero"/><path fill="#6d9eeb" d="m132.87679 457.40045l0 0c0 -3.797943 3.0788422 -6.87677 6.87677 -6.87677l278.5929 0c1.823822 0 3.5729675 0.7245178 4.86261 2.0141602c1.2896423 1.2896423 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.797943 -3.078827 6.87677 -6.87677 6.87677l-278.5929 0c-3.7979279 0 -6.87677 -3.078827 -6.87677 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m137.01575 450.82126l288.2205 0l0 21.070862l-288.2205 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m152.81091 473.90125l4.015625 -10.484375l1.5 0l4.296875 10.484375l-1.578125 0l-1.234375 -3.171875l-4.375 0l-1.15625 3.171875l-1.46875 0zm3.015625 -4.3125l3.5625 0l-1.09375 -2.90625q-0.5 -1.3125 -0.75 -2.171875q-0.203125 1.015625 -0.5625 2.0l-1.15625 3.078125zm12.666229 4.3125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.276108 -5.203125l0 -1.484375l1.296875 0l0 1.484375l-1.296875 0zm-1.625 11.953125l0.25 -1.09375q0.375 0.09375 0.59375 0.09375q0.40625 0 0.59375 -0.265625q0.1875 -0.25 0.1875 -1.296875l0 -7.984375l1.296875 0l0 8.015625q0 1.390625 -0.375 1.953125q-0.453125 0.703125 -1.53125 0.703125q-0.53125 0 -1.015625 -0.125zm9.881149 -2.953125l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm2.6511078 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 
-1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm10.65625 1.109375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm5.6439514 1.15625l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.2751007 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.276108 3.796875l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.209274 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.291733 -5.21875l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561493 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916733 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 
-4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm14.2074585 2.78125l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm0.7751007 -2.640625q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm11.3793335 3.796875l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.161896 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm2.447998 3.8125l2.78125 -3.953125l-2.578125 -3.640625l1.609375 0l1.171875 1.78125q0.328125 0.5 0.53125 0.84375q0.3125 -0.46875 0.578125 -0.828125l1.28125 -1.796875l1.53125 0l-2.625 3.578125l2.828125 4.015625l-1.578125 0l-1.5625 -2.359375l-0.40625 -0.640625l-2.0 3.0l-1.5625 0zm8.1875 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.256134 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 
-0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm12.208771 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.5686646 0l0 -1.046875l4.84375 -5.546875q-0.828125 0.03125 -1.453125 0.03125l-3.09375 0l0 -1.03125l6.203125 0l0 0.84375l-4.109375 4.828125l-0.796875 0.875q0.859375 -0.0625 1.625 -0.0625l3.515625 0l0 1.109375l-6.734375 0zm13.21875 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm11.2543335 7.4375l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm12.197968 1.296875l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm5.2087708 0l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.2750854 -3.796875q0 -2.109375 1.171875 
-3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8962708 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.161896 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.3073425 3.8125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm13.104218 -2.78125l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 
0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm7.578125 0.328125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375z" fill-rule="nonzero"/><path fill="#e69138" d="m132.88327 555.6114l0 0c0 -3.7979736 3.0788422 -6.87677 6.8767853 -6.87677l278.5929 0c1.823822 0 3.5729675 0.7244873 4.86261 2.0141602c1.2896423 1.2896118 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.7979126 -3.0788574 6.87677 -6.87677 6.87677l-278.5929 0c-3.797943 0 -6.8767853 -3.0788574 -6.8767853 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m159.4187 549.0312l239.2756 0l0 21.070862l-239.2756 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m210.00502 568.43933l1.390625 0.34375q-0.4375 1.703125 -1.578125 2.609375q-1.125 0.890625 -2.765625 0.890625q-1.6875 0 -2.75 -0.6875q-1.0625 -0.6875 -1.625 -2.0q-0.546875 -1.3125 -0.546875 -2.8125q0 -1.640625 0.625 -2.859375q0.625 -1.21875 1.78125 -1.84375q1.15625 -0.640625 2.546875 -0.640625q1.5625 0 2.640625 0.8125q1.078125 0.796875 1.5 2.25l-1.375 0.3125q-0.359375 -1.140625 -1.0625 -1.65625q-0.6875 -0.53125 -1.734375 -0.53125q-1.21875 0 -2.03125 0.578125q-0.8125 0.578125 -1.140625 1.5625q-0.328125 0.96875 -0.328125 2.015625q0 1.328125 0.390625 2.328125q0.390625 1.0 1.21875 1.5q0.828125 0.484375 1.78125 0.484375q1.171875 0 1.96875 -0.671875q0.8125 -0.671875 1.09375 -1.984375zm2.9280243 3.671875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099396 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.135483 3.59375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 
0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119858 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.4626007 -1.296875l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm11.4574585 4.53125l0 -10.484375l1.390625 0l0 4.296875l5.4531097 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.4531097 0l0 4.9375l-1.390625 0zm10.490524 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.7292175 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.55072 3.375l0 -10.484375l1.390625 0l0 10.484375l-1.390625 0zm3.8375854 0l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm12.035736 8.40625l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 
0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm7.322998 3.9375l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm7.651062 -3.15625l0 -1.28125l3.953125 0l0 1.28125l-3.953125 0zm5.3494263 3.15625l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.49054 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.291687 4.53125l-2.890625 -7.59375l1.359375 0l1.625 4.546875q0.265625 0.734375 0.5 1.53125q0.15625 -0.609375 0.46875 -1.453125l1.6875 -4.625l1.328125 0l-2.875 7.59375l-1.203125 0zm10.421875 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.151123 4.53125l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm7.09375 -5.109375q0 -2.609375 1.40625 -4.078125q1.40625 -1.484375 3.625 -1.484375q1.453125 0 2.609375 0.703125q1.171875 0.6875 1.78125 1.921875q0.609375 1.234375 0.609375 2.8125q0 1.59375 -0.640625 2.859375q-0.640625 1.265625 -1.828125 1.90625q-1.171875 0.640625 -2.546875 0.640625q-1.46875 0 -2.640625 -0.703125q-1.171875 -0.71875 -1.78125 -1.953125q-0.59375 -1.25 -0.59375 -2.625zm1.4375 0.015625q0 1.90625 1.015625 3.0q1.015625 1.078125 2.5625 1.078125q1.5625 0 2.578125 -1.09375q1.015625 -1.109375 1.015625 -3.125q0 -1.28125 -0.4375 -2.234375q-0.4375 -0.96875 -1.265625 -1.484375q-0.828125 -0.53125 -1.875 -0.53125q-1.46875 0 -2.53125 1.015625q-1.0625 1.015625 -1.0625 3.375zm10.368835 5.09375l0 -10.484375l1.437561 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.500061 -8.25l0 8.25l-1.34375 0zm10.58429 0l0 -10.484375l1.4375 0l5.5 8.234375l0 
-8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm9.537415 0l4.0625 -5.46875l-3.578125 -5.015625l1.65625 0l1.890625 2.6875q0.59375 0.828125 0.84375 1.28125q0.359375 -0.5625 0.84375 -1.1875l2.109375 -2.78125l1.5 0l-3.6875 4.9375l3.984375 5.546875l-1.71875 0l-2.640625 -3.75q-0.21875 -0.3125 -0.46875 -0.6875q-0.34375 0.5625 -0.5 0.78125l-2.625 3.65625l-1.671875 0zm14.722595 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.3806763 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 
0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 300.96063l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 300.96063l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path fill="#000000" d="m484.35364 324.04062l0 -10.484375l2.078125 0l2.484375 7.421875q0.34375 1.03125 0.5 1.546875q0.1875 -0.5625 0.5625 -1.671875l2.515625 -7.296875l1.859375 0l0 10.484375l-1.328125 0l0 -8.78125l-3.046875 8.78125l-1.265625 0l-3.03125 -8.9375l0 8.9375l-1.328125 0zm12.083771 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.396759 0l2.78125 -3.953125l-2.578125 -3.640625l1.609375 0l1.171875 1.78125q0.328125 0.5 0.53125 0.84375q0.3125 -0.46875 0.578125 -0.828125l1.28125 -1.796875l1.53125 0l-2.625 3.578125l2.828125 4.015625l-1.578125 0l-1.5625 -2.359375l-0.40625 -0.640625l-2.0 3.0l-1.5625 0zm11.0 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.2438354 1.15625l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.5780945 -0.265625 -0.8749695 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.2812195 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.151123 0l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099426 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm10.7699585 0.734375q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 
0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619812 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034607 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729187 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm19.11322 2.4375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.307373 3.8125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 
0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm13.072998 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm11.3637085 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.666687 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.651123 -3.15625l0 -1.28125l3.953125 0l0 1.28125l-3.953125 0zm5.58374 3.15625l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128845 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 
0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 
-1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51706 354.9895l239.02365 0l0 35.905518l-239.02365 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51706 354.9895l239.02365 0l0 35.905518l-239.02365 0z" fill-rule="evenodd"/><path fill="#000000" d="m488.73734 378.0695l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm13.181854 4.5625q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.291748 3.8125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099365 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.6562195 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q0.9999695 -1.0625 2.5780945 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.182373 4.53125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm10.963623 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 
-0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm10.409546 -2.953125l0 -1.234375l4.4375 -0.015625l0 3.90625q-1.03125 0.8125 -2.125 1.21875q-1.078125 0.40625 -2.21875 0.40625q-1.546875 0 -2.8125 -0.65625q-1.265625 -0.65625 -1.90625 -1.90625q-0.640625 -1.265625 -0.640625 -2.8125q0 -1.53125 0.640625 -2.84375q0.640625 -1.328125 1.84375 -1.96875q1.203125 -0.65625 2.765625 -0.65625q1.140625 0 2.0625 0.375q0.921875 0.359375 1.4375 1.03125q0.53125 0.65625 0.796875 1.703125l-1.25 0.34375q-0.234375 -0.796875 -0.59375 -1.25q-0.34375 -0.46875 -1.0 -0.734375q-0.65625 -0.28125 -1.4375 -0.28125q-0.953125 0 -1.65625 0.296875q-0.6875 0.28125 -1.125 0.765625q-0.421875 0.46875 -0.65625 1.03125q-0.390625 0.96875 -0.390625 2.109375q0 1.40625 0.46875 2.359375q0.484375 0.9375 1.40625 1.390625q0.9375 0.453125 1.96875 0.453125q0.90625 0 1.765625 -0.34375q0.859375 -0.34375 1.296875 -0.734375l0 -1.953125l-3.078125 0zm6.3063965 4.109375l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm9.849365 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.307373 6.71875l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.994873 3.75l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 
0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm14.6762085 3.078125q-1.0625 -1.34375 -1.796875 -3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm2.4274902 -3.078125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.666748 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.651123 -3.15625l0 -1.28125l3.953125 0l0 1.28125l-3.953125 0zm5.58374 3.15625l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128784 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 
-0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.6875 5.34375l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 
1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 405.15485l292.34647 0l0 35.905518l-292.34647 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 405.15485l292.34647 0l0 35.905518l-292.34647 0z" fill-rule="evenodd"/><path fill="#000000" d="m484.4452 428.23486l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.646759 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657104 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.3281555 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.2031555 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.693634 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 
3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128784 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 
-1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.171875 2.265625l0 -1.46875l1.46875 0l0 1.46875q0 0.8125 -0.28125 1.296875q-0.28125 0.5 -0.90625 0.78125l-0.359375 -0.546875q0.40625 -0.1875 0.59375 -0.53125q0.203125 -0.34375 0.21875 -1.0l-0.734375 0zm7.815857 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.161865 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 
-0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm2.447998 3.8125l2.78125 -3.953125l-2.578125 -3.640625l1.609375 0l1.171875 1.78125q0.328125 0.5 0.53125 0.84375q0.3125 -0.46875 0.578125 -0.828125l1.28125 -1.796875l1.53125 0l-2.625 3.578125l2.828125 4.015625l-1.578125 0l-1.5625 -2.359375l-0.40625 -0.640625l-2.0 3.0l-1.5625 0zm8.1875 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.193115 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.166748 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm16.593262 0l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.2750854 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.276123 3.796875l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.20929 0l0 
-0.953125q-0.71881104 1.125 -2.125061 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53131104 0.28125 0.87506104 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.062561 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.437561 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59381104 -0.734375 -1.484436 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.291748 -5.21875l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 502.98163l277.0079 0l0 35.905518l-277.0079 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 502.98163l277.0079 0l0 35.905518l-277.0079 0z" fill-rule="evenodd"/><path fill="#000000" d="m484.49762 526.06165l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.64679 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994843 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.665741 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 
1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.693665 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291687 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 
-1.890625zm14.128845 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 
0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.171875 2.265625l0 -1.46875l1.46875 0l0 1.46875q0 0.8125 -0.28125 1.296875q-0.28125 0.5 -0.90625 0.78125l-0.359375 -0.546875q0.40625 -0.1875 0.59375 -0.53125q0.203125 -0.34375 0.21875 -1.0l-0.734375 0zm12.737732 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm12.494873 1.34375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm6.666687 2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2405396 0l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099426 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 
-0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.104187 4.53125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm11.6762085 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.2751465 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.276062 3.796875l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.20929 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.291748 -5.21875l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916687 0.625l1.250061 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.85943604 -1.140625 -0.85943604 -2.71875q0 -1.09375 0.390625 -2.0q0.40631104 -0.921875 1.140686 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.82818604 -0.609375 -0.79693604 -1.84375zm1.062561 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 
-0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 600.33594l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 600.33594l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path fill="#000000" d="m485.3283 623.41595l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.646759 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657104 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.3281555 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.5312805 5.84375l-1.34375 0zm8.693634 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 
0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128784 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 
-2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.171875 2.265625l0 -1.46875l1.46875 0l0 1.46875q0 0.8125 -0.28125 1.296875q-0.28125 0.5 -0.90625 0.78125l-0.359375 -0.546875q0.40625 -0.1875 0.59375 -0.53125q0.203125 -0.34375 0.21875 -1.0l-0.734375 0zm8.206482 0l0 -10.484375l1.390625 0l0 10.484375l-1.390625 0zm3.8375854 0l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm12.035706 8.40625l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm12.197998 1.296875l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 
2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm8.937012 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 699.07874l258.01578 0l0 35.905518l-258.01578 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 699.07874l258.01578 0l0 
35.905518l-258.01578 0z" fill-rule="evenodd"/><path fill="#000000" d="m488.45197 722.15875l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.646759 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994873 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.665741 1.15625l-2.3281555 -7.59375l1.328125 0l1.2031555 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.6936035 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.91571 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.619873 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034546 0l0 -10.484375l1.390625 0l0 
4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.729248 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128784 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 
-1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.380615 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm8.171875 2.265625l0 -1.46875l1.46875 0l0 1.46875q0 0.8125 -0.28125 1.296875q-0.28125 0.5 -0.90625 0.78125l-0.359375 -0.546875q0.40625 -0.1875 0.59375 -0.53125q0.203125 -0.34375 0.21875 -1.0l-0.734375 0zm7.300232 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm10.65625 1.109375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686646 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 
0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.2125854 -1.625l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm2.375 2.78125l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm13.354248 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.104248 4.53125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm11.7543335 3.796875l0 -10.484375l1.390625 0l0 10.484375l-1.390625 0zm3.8375854 0l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75z" fill-rule="nonzero"/><path fill="#ffffff" d="m188.08005 817.6982l181.95276 0l0 58.04724l-181.95276 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m188.08005 817.6982l181.95276 0l0 58.04724l-181.95276 0z" fill-rule="evenodd"/><path fill="#000000" d="m200.95192 840.77814l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm15.646774 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 
1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm9.994858 3.375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm2.6657257 1.15625l-2.328125 -7.59375l1.328125 0l1.203125 4.375l0.453125 1.640625q0.03125 -0.125 0.390625 -1.578125l1.21875 -4.4375l1.328125 0l1.125 4.40625l0.390625 1.453125l0.4375 -1.46875l1.296875 -4.390625l1.25 0l-2.375 7.59375l-1.34375 0l-1.203125 -4.546875l-0.296875 -1.296875l-1.53125 5.84375l-1.34375 0zm8.693649 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291733 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8962708 0l0 -10.484375l1.296875 0l0 5.96875l3.046875 -3.078125l1.671875 0l-2.90625 2.8125l3.1875 4.78125l-1.578125 0l-2.515625 -3.890625l-0.90625 0.875l0 3.015625l-1.296875 0zm10.915741 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.6198425 3.796875l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm8.034576 0l0 -10.484375l1.390625 0l0 4.296875l5.453125 0l0 -4.296875l1.390625 0l0 10.484375l-1.390625 0l0 -4.9375l-5.453125 0l0 4.9375l-1.390625 0zm10.49054 0l0 -10.484375l1.390625 0l0 9.25l5.15625 0l0 1.234375l-6.546875 0zm7.7292175 -3.375l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 
-0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm14.128845 3.375l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.240509 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.244873 6.734375l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm12.59375 -5.375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.1667175 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.3806458 -2.265625l1.265625 -0.203125q0.109375 
0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm14.618866 0.5625l0 -2.875l-2.859375 0l0 -1.203125l2.859375 0l0 -2.84375l1.21875 0l0 2.84375l2.859375 0l0 1.203125l-2.859375 0l0 2.875l-1.21875 0z" fill-rule="nonzero"/><path fill="#000000" d="m221.12871 858.77814l-4.0625 -10.484375l1.5 0l2.734375 7.609375q0.328125 0.921875 0.546875 1.71875q0.25 -0.859375 0.5625 -1.71875l2.84375 -7.609375l1.40625 0l-4.109375 10.484375l-1.421875 0zm6.3543854 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm5.3655243 0l-2.890625 -7.59375l1.359375 0l1.625 4.546875q0.265625 0.734375 0.5 1.53125q0.15625 -0.609375 0.46875 -1.453125l1.6875 -4.625l1.328125 0l-2.875 7.59375l-1.203125 0zm10.171875 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm8.229233 3.8125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm6.807358 0q0 -2.109375 1.171875 -3.125q0.98439026 -0.84375 2.3906403 -0.84375q1.578125 0 2.5625 
1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.5937653 0 -2.5781403 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.64064026 0.71875 1.6093903 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.6093903 0.71875q-0.625 0.71875 -0.625 2.1875zm6.338608 3.96875l3.046875 -10.84375l1.03125 0l-3.046875 10.84375l-1.03125 0zm8.196991 -0.171875l-4.0625 -10.484375l1.5 0l2.734375 7.609375q0.328125 0.921875 0.546875 1.71875q0.25 -0.859375 0.5625 -1.71875l2.84375 -7.609375l1.40625 0l-4.109375 10.484375l-1.421875 0zm6.35437 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686646 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7405396 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm11.91571 5.171875l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.979248 3.75l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.4118958 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 
2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.2917175 -5.203125l0 -1.484375l1.296875 0l0 1.484375l-1.296875 0zm-1.625 11.953125l0.25 -1.09375q0.375 0.09375 0.59375 0.09375q0.40625 0 0.59375 -0.265625q0.1875 -0.25 0.1875 -1.296875l0 -7.984375l1.296875 0l0 8.015625q0 1.390625 -0.375 1.953125q-0.453125 0.703125 -1.53125 0.703125q-0.53125 0 -1.015625 -0.125zm10.099915 -5.40625l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.135468 1.75l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm5.1875 1.625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875z" fill-rule="nonzero"/><path fill="#ffffff" d="m188.08136 970.8018l72.62991 0l0 35.905518l-72.62991 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m188.08136 970.8018l72.62991 0l0 35.905518l-72.62991 0z" fill-rule="evenodd"/><path fill="#000000" d="m207.5549 993.88184l0 -10.484375l3.9375 0q1.203125 0 1.921875 0.3125q0.734375 0.3125 1.140625 0.984375q0.40625 0.65625 0.40625 1.375q0 0.671875 -0.375 1.265625q-0.359375 0.59375 -1.09375 0.96875q0.953125 0.28125 1.46875 0.953125q0.515625 0.671875 0.515625 1.578125q0 0.75 -0.3125 1.390625q-0.3125 0.625 -0.78125 0.96875q-0.453125 0.34375 -1.140625 0.515625q-0.6875 0.171875 -1.6875 0.171875l-4.0 0zm1.390625 -6.078125l2.265625 0q0.921875 0 1.3125 -0.125q0.53125 -0.15625 0.796875 -0.515625q0.28125 -0.375 0.28125 -0.921875q0 -0.53125 -0.25 -0.921875q-0.25 -0.390625 -0.71875 -0.53125q-0.46875 -0.15625 -1.59375 -0.15625l-2.09375 0l0 3.171875zm0 4.84375l2.609375 0q0.671875 0 0.9375 -0.046875q0.484375 -0.09375 0.796875 -0.296875q0.328125 -0.203125 0.53125 -0.578125q0.21875 -0.390625 0.21875 -0.890625q0 -0.578125 -0.3125 -1.015625q-0.296875 -0.4375 -0.828125 -0.609375q-0.53125 -0.171875 
-1.53125 -0.171875l-2.421875 0l0 3.609375zm8.275604 -7.78125l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686493 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.5719757 1.15625l0 -6.59375l-1.140625 0l0 -1.0l1.140625 0l0 -0.8125q0 -0.765625 0.125 -1.140625q0.1875 -0.5 0.65625 -0.8125q0.46875 -0.3125 1.3125 -0.3125q0.546875 0 1.203125 0.125l-0.1875 1.125q-0.40625 -0.0625 -0.765625 -0.0625q-0.578125 0 -0.828125 0.25q-0.234375 0.25 -0.234375 0.9375l0 0.703125l1.46875 0l0 1.0l-1.46875 0l0 6.59375l-1.28125 0zm3.7594757 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2248993 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.490524 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375z" fill-rule="nonzero"/><path fill="#ffffff" d="m469.51968 971.55646l174.01578 0l0 35.905457l-174.01578 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m469.51968 971.55646l174.01578 0l0 35.905457l-174.01578 0z" fill-rule="evenodd"/><path fill="#000000" d="m491.67438 994.6364l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm11.353729 5.5l0 -4.4375l-4.046875 -6.046875l1.6875 0l2.0625 3.15625q0.578125 0.890625 1.0625 1.78125q0.484375 -0.828125 1.15625 -1.859375l2.03125 -3.078125l1.609375 0l-4.1875 6.046875l0 4.4375l-1.375 0zm6.7912292 0l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 8.25l-1.34375 0zm18.55304 -1.125q0.96875 0.671875 1.78125 0.96875l-0.40625 0.96875q-1.125 -0.40625 -2.25 -1.28125q-1.171875 0.640625 -2.578125 0.640625q-1.421875 0 -2.59375 -0.671875q-1.15625 -0.6875 -1.78125 -1.9375q-0.625 -1.25 -0.625 -2.8125q0 -1.546875 0.625 -2.8125q0.640625 -1.28125 1.796875 -1.9375q1.171875 -0.671875 2.609375 -0.671875q1.453125 0 2.609375 0.6875q1.171875 0.6875 1.78125 1.9375q0.609375 1.234375 0.609375 2.796875q0 1.296875 -0.390625 2.328125q-0.390625 1.03125 -1.1875 1.796875zm-3.046875 -1.765625q1.203125 0.328125 1.984375 1.0q1.21875 -1.125 1.21875 -3.359375q0 -1.28125 -0.4375 -2.234375q-0.4375 -0.953125 -1.265625 -1.46875q-0.828125 -0.53125 -1.875 -0.53125q-1.546875 0 -2.578125 1.0625q-1.015625 
1.0625 -1.015625 3.171875q0 2.046875 1.015625 3.140625q1.015625 1.09375 2.578125 1.09375q0.75 0 1.40625 -0.265625q-0.65625 -0.421875 -1.375 -0.609375l0.34375 -1.0zm10.565796 2.890625l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm8.16626 8.421875l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm10.203125 -4.078125l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 1.15625l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm7.666748 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm17.144958 0l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm7.276062 3.796875l0 -7.59375l1.15625 
0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.81256104 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.42193604 0 -0.76568604 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.8963013 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm5.3655396 0l-2.890625 -7.59375l1.359375 0l1.625 4.546875q0.265625 0.734375 0.5 1.53125q0.15625 -0.609375 0.46875 -1.453125l1.6875 -4.625l1.328125 0l-2.875 7.59375l-1.203125 0zm10.421875 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m835.2677 303.958l296.75598 0l0 29.921265l-296.75598 0z" fill-rule="evenodd"/><path fill="#e06666" d="m844.9396 326.53424l2.625 -0.25q0.234375 1.3125 0.953125 1.9375q0.734375 0.609375 1.96875 0.609375q1.296875 0 1.953125 -0.546875q0.671875 -0.546875 0.671875 -1.28125q0 -0.484375 -0.28125 -0.8125q-0.28125 -0.328125 -0.96875 -0.578125q-0.484375 -0.15625 -2.171875 -0.578125q-2.15625 -0.546875 -3.03125 -1.328125q-1.234375 -1.09375 -1.234375 -2.6875q0 -1.015625 0.578125 -1.90625q0.578125 -0.890625 1.65625 -1.34375q1.09375 -0.46875 2.640625 -0.46875q2.515625 0 3.78125 1.109375q1.28125 1.09375 1.34375 2.9375l-2.703125 0.109375q-0.171875 -1.03125 -0.75 -1.46875q-0.5625 -0.453125 -1.703125 -0.453125q-1.171875 0 -1.84375 0.46875q-0.421875 0.3125 -0.421875 0.84375q0 0.46875 0.40625 0.796875q0.5 0.4375 2.46875 0.90625q1.96875 0.453125 2.90625 0.953125q0.953125 0.5 1.484375 1.359375q0.53125 0.859375 0.53125 2.125q0 1.15625 -0.640625 2.15625q-0.640625 1.0 -1.8125 1.484375q-1.15625 0.484375 -2.890625 0.484375q-2.53125 0 -3.890625 -1.171875q-1.359375 -1.171875 -1.625 -3.40625zm13.1154785 -6.640625l0 -2.375l2.5625 0l0 2.375l-2.5625 0zm0 10.984375l0 -9.671875l2.5625 0l0 9.671875l-2.5625 0zm4.9801636 -9.671875l2.359375 0l0 1.3125q1.265625 -1.53125 3.015625 -1.53125q0.9375 0 1.609375 0.390625q0.6875 0.375 1.125 1.140625q0.640625 -0.765625 1.375 -1.140625q0.75 -0.390625 1.578125 -0.390625q1.0625 0 1.796875 0.4375q0.75 0.421875 1.109375 1.265625q0.265625 0.625 0.265625 2.0l0 6.1875l-2.5625 0l0 -5.53125q0 -1.4375 -0.265625 -1.859375q-0.34375 -0.546875 -1.09375 -0.546875q-0.53125 0 -1.015625 0.328125q-0.46875 0.328125 -0.671875 0.96875q-0.203125 0.625 -0.203125 2.0l0 4.640625l-2.5625 0l0 -5.296875q0 -1.421875 -0.140625 -1.828125q-0.140625 -0.40625 -0.421875 -0.609375q-0.28125 -0.203125 -0.78125 -0.203125q-0.59375 0 -1.0625 0.328125q-0.46875 0.3125 -0.6875 0.921875q-0.203125 0.59375 -0.203125 1.984375l0 4.703125l-2.5625 0l0 -9.671875zm23.150879 9.671875l0 
-1.453125q-0.53125 0.78125 -1.390625 1.234375q-0.859375 0.4375 -1.8125 0.4375q-0.96875 0 -1.75 -0.421875q-0.765625 -0.4375 -1.125 -1.203125q-0.34375 -0.78125 -0.34375 -2.140625l0 -6.125l2.5625 0l0 4.4375q0 2.046875 0.140625 2.515625q0.140625 0.453125 0.515625 0.71875q0.375 0.265625 0.953125 0.265625q0.65625 0 1.171875 -0.359375q0.515625 -0.359375 0.703125 -0.890625q0.203125 -0.53125 0.203125 -2.609375l0 -4.078125l2.546875 0l0 9.671875l-2.375 0zm5.036621 0l0 -13.359375l2.5625 0l0 13.359375l-2.5625 0zm7.0895386 -6.71875l-2.328125 -0.421875q0.40625 -1.40625 1.359375 -2.078125q0.953125 -0.671875 2.84375 -0.671875q1.703125 0 2.546875 0.40625q0.84375 0.40625 1.171875 1.03125q0.34375 0.625 0.34375 2.28125l-0.015625 3.0q0 1.265625 0.109375 1.875q0.125 0.609375 0.46875 1.296875l-2.53125 0q-0.109375 -0.25 -0.25 -0.75q-0.0625 -0.234375 -0.09375 -0.3125q-0.65625 0.640625 -1.40625 0.96875q-0.734375 0.3125 -1.59375 0.3125q-1.484375 0 -2.34375 -0.8125q-0.859375 -0.8125 -0.859375 -2.046875q0 -0.828125 0.390625 -1.46875q0.390625 -0.640625 1.09375 -0.96875q0.703125 -0.34375 2.03125 -0.609375q1.796875 -0.328125 2.484375 -0.625l0 -0.25q0 -0.75 -0.359375 -1.0625q-0.359375 -0.3125 -1.375 -0.3125q-0.6875 0 -1.078125 0.28125q-0.375 0.265625 -0.609375 0.9375zm3.421875 2.078125q-0.484375 0.15625 -1.5625 0.390625q-1.0625 0.21875 -1.390625 0.4375q-0.5 0.359375 -0.5 0.90625q0 0.53125 0.40625 0.9375q0.40625 0.390625 1.015625 0.390625q0.703125 0 1.328125 -0.46875q0.46875 -0.34375 0.609375 -0.84375q0.09375 -0.328125 0.09375 -1.25l0 -0.5zm9.485107 -5.03125l0 2.03125l-1.75 0l0 3.90625q0 1.1875 0.046875 1.390625q0.046875 0.1875 0.21875 0.3125q0.1875 0.125 0.4375 0.125q0.359375 0 1.03125 -0.25l0.21875 2.0q-0.890625 0.375 -2.015625 0.375q-0.703125 0 -1.265625 -0.234375q-0.546875 -0.234375 -0.8125 -0.59375q-0.25 -0.375 -0.34375 -1.0q-0.09375 -0.453125 -0.09375 -1.8125l0 -4.21875l-1.171875 0l0 -2.03125l1.171875 0l0 -1.921875l2.578125 -1.5l0 3.421875l1.75 0zm1.7752075 -1.3125l0 -2.375l2.5625 0l0 2.375l-2.5625 0zm0 10.984375l0 -9.671875l2.5625 0l0 9.671875l-2.5625 0zm4.5895386 -4.96875q0 -1.28125 0.625 -2.46875q0.625 -1.203125 1.78125 -1.828125q1.15625 -0.625 2.578125 -0.625q2.1875 0 3.59375 1.421875q1.40625 1.421875 1.40625 3.609375q0 2.1875 -1.421875 3.640625q-1.421875 1.4375 -3.5625 1.4375q-1.328125 0 -2.546875 -0.59375q-1.203125 -0.609375 -1.828125 -1.765625q-0.625 -1.171875 -0.625 -2.828125zm2.625 0.125q0 1.453125 0.671875 2.21875q0.6875 0.75 1.6875 0.75q1.0 0 1.671875 -0.75q0.6875 -0.765625 0.6875 -2.234375q0 -1.421875 -0.6875 -2.1875q-0.671875 -0.765625 -1.671875 -0.765625q-1.0 0 -1.6875 0.765625q-0.671875 0.765625 -0.671875 2.203125zm18.161621 4.84375l-2.5625 0l0 -4.9375q0 -1.5625 -0.171875 -2.015625q-0.15625 -0.46875 -0.53125 -0.71875q-0.359375 -0.265625 -0.875 -0.265625q-0.671875 0 -1.203125 0.375q-0.53125 0.359375 -0.734375 0.96875q-0.1875 0.59375 -0.1875 2.21875l0 4.375l-2.546875 0l0 -9.671875l2.375 0l0 1.421875q1.265625 -1.640625 3.1875 -1.640625q0.84375 0 1.546875 0.3125q0.703125 0.296875 1.0625 0.78125q0.359375 0.46875 0.5 1.078125q0.140625 0.59375 0.140625 1.703125l0 6.015625zm19.610535 -1.625l-1.546875 1.96875q-1.140625 -0.546875 -2.1875 -1.53125q-0.8125 0.734375 -1.71875 1.078125q-0.90625 0.34375 -2.171875 0.34375q-2.5 0 -3.75 -1.40625q-0.984375 -1.078125 -0.984375 -2.484375q0 -1.296875 0.765625 -2.3125q0.765625 -1.03125 2.296875 -1.78125q-0.6875 -0.8125 -1.03125 -1.546875q-0.34375 -0.734375 -0.34375 -1.390625q0 -1.203125 0.96875 -2.046875q0.96875 -0.84375 2.765625 -0.84375q1.734375 0 2.703125 0.890625q0.96875 
0.875 0.96875 2.140625q0 0.8125 -0.484375 1.546875q-0.46875 0.734375 -1.9375 1.65625l1.859375 2.4375q0.328125 -0.578125 0.5625 -1.515625l2.3125 0.53125q-0.34375 1.21875 -0.609375 1.796875q-0.25 0.5625 -0.546875 0.953125q0.4375 0.40625 1.109375 0.890625q0.6875 0.46875 1.0 0.625zm-6.96875 -7.265625l0.6875 -0.53125q0.765625 -0.59375 0.765625 -1.171875q0 -0.5 -0.375 -0.84375q-0.359375 -0.34375 -0.984375 -0.34375q-0.609375 0 -0.96875 0.3125q-0.34375 0.296875 -0.34375 0.703125q0 0.46875 0.59375 1.140625l0.625 0.734375zm-1.0 2.78125q-0.890625 0.4375 -1.328125 1.078125q-0.4375 0.625 -0.4375 1.28125q0 0.828125 0.546875 1.359375q0.546875 0.515625 1.453125 0.515625q0.609375 0 1.15625 -0.234375q0.5625 -0.234375 1.21875 -0.78125l-2.609375 -3.21875zm14.8125 6.109375l0 -13.359375l9.90625 0l0 2.265625l-7.21875 0l0 2.953125l6.71875 0l0 2.25l-6.71875 0l0 3.640625l7.46875 0l0 2.25l-10.15625 0zm12.2247925 -9.671875l2.359375 0l0 1.3125q1.265625 -1.53125 3.015625 -1.53125q0.9375 0 1.609375 0.390625q0.6875 0.375 1.125 1.140625q0.640625 -0.765625 1.375 -1.140625q0.75 -0.390625 1.578125 -0.390625q1.0625 0 1.796875 0.4375q0.75 0.421875 1.109375 1.265625q0.265625 0.625 0.265625 2.0l0 6.1875l-2.5625 0l0 -5.53125q0 -1.4375 -0.265625 -1.859375q-0.34375 -0.546875 -1.09375 -0.546875q-0.53125 0 -1.015625 0.328125q-0.46875 0.328125 -0.671875 0.96875q-0.203125 0.625 -0.203125 2.0l0 4.640625l-2.5625 0l0 -5.296875q0 -1.421875 -0.140625 -1.828125q-0.140625 -0.40625 -0.421875 -0.609375q-0.28125 -0.203125 -0.78125 -0.203125q-0.59375 0 -1.0625 0.328125q-0.46875 0.3125 -0.6875 0.921875q-0.203125 0.59375 -0.203125 1.984375l0 4.703125l-2.5625 0l0 -9.671875zm23.150879 9.671875l0 -1.453125q-0.53125 0.78125 -1.390625 1.234375q-0.859375 0.4375 -1.8125 0.4375q-0.96875 0 -1.75 -0.421875q-0.765625 -0.4375 -1.125 -1.203125q-0.34375 -0.78125 -0.34375 -2.140625l0 -6.125l2.5625 0l0 4.4375q0 2.046875 0.140625 2.515625q0.140625 0.453125 0.515625 0.71875q0.375 0.265625 0.953125 0.265625q0.65625 0 1.171875 -0.359375q0.515625 -0.359375 0.703125 -0.890625q0.203125 -0.53125 0.203125 -2.609375l0 -4.078125l2.546875 0l0 9.671875l-2.375 0zm5.036621 0l0 -13.359375l2.5625 0l0 13.359375l-2.5625 0zm7.0895386 -6.71875l-2.328125 -0.421875q0.40625 -1.40625 1.359375 -2.078125q0.953125 -0.671875 2.84375 -0.671875q1.703125 0 2.546875 0.40625q0.84375 0.40625 1.171875 1.03125q0.34375 0.625 0.34375 2.28125l-0.015625 3.0q0 1.265625 0.109375 1.875q0.125 0.609375 0.46875 1.296875l-2.53125 0q-0.109375 -0.25 -0.25 -0.75q-0.0625 -0.234375 -0.09375 -0.3125q-0.65625 0.640625 -1.40625 0.96875q-0.734375 0.3125 -1.59375 0.3125q-1.484375 0 -2.34375 -0.8125q-0.859375 -0.8125 -0.859375 -2.046875q0 -0.828125 0.390625 -1.46875q0.390625 -0.640625 1.09375 -0.96875q0.703125 -0.34375 2.03125 -0.609375q1.796875 -0.328125 2.484375 -0.625l0 -0.25q0 -0.75 -0.359375 -1.0625q-0.359375 -0.3125 -1.375 -0.3125q-0.6875 0 -1.078125 0.28125q-0.375 0.265625 -0.609375 0.9375zm3.421875 2.078125q-0.484375 0.15625 -1.5625 0.390625q-1.0625 0.21875 -1.390625 0.4375q-0.5 0.359375 -0.5 0.90625q0 0.53125 0.40625 0.9375q0.40625 0.390625 1.015625 0.390625q0.703125 0 1.328125 -0.46875q0.46875 -0.34375 0.609375 -0.84375q0.09375 -0.328125 0.09375 -1.25l0 -0.5zm9.485168 -5.03125l0 2.03125l-1.750061 0l0 3.90625q0 1.1875 0.046875 1.390625q0.046875 0.1875 0.21875 0.3125q0.1875 0.125 0.43756104 0.125q0.359375 0 1.03125 -0.25l0.21875 2.0q-0.890625 0.375 -2.015686 0.375q-0.703125 0 -1.265625 -0.234375q-0.546875 -0.234375 -0.8125 -0.59375q-0.25 -0.375 -0.34375 -1.0q-0.09375 -0.453125 -0.09375 -1.8125l0 -4.21875l-1.171875 
0l0 -2.03125l1.171875 0l0 -1.921875l2.578125 -1.5l0 3.421875l1.750061 0zm1.7751465 -1.3125l0 -2.375l2.5625 0l0 2.375l-2.5625 0zm0 10.984375l0 -9.671875l2.5625 0l0 9.671875l-2.5625 0zm4.5894775 -4.96875q0 -1.28125 0.625 -2.46875q0.625 -1.203125 1.78125 -1.828125q1.15625 -0.625 2.578125 -0.625q2.1875 0 3.59375 1.421875q1.40625 1.421875 1.40625 3.609375q0 2.1875 -1.421875 3.640625q-1.421875 1.4375 -3.5625 1.4375q-1.328125 0 -2.546875 -0.59375q-1.203125 -0.609375 -1.828125 -1.765625q-0.625 -1.171875 -0.625 -2.828125zm2.625 0.125q0 1.453125 0.671875 2.21875q0.6875 0.75 1.6875 0.75q1.0 0 1.671875 -0.75q0.6875 -0.765625 0.6875 -2.234375q0 -1.421875 -0.6875 -2.1875q-0.671875 -0.765625 -1.671875 -0.765625q-1.0 0 -1.6875 0.765625q-0.671875 0.765625 -0.671875 2.203125zm18.161621 4.84375l-2.5625 0l0 -4.9375q0 -1.5625 -0.171875 -2.015625q-0.15625 -0.46875 -0.53125 -0.71875q-0.359375 -0.265625 -0.875 -0.265625q-0.671875 0 -1.203125 0.375q-0.53125 0.359375 -0.734375 0.96875q-0.1875 0.59375 -0.1875 2.21875l0 4.375l-2.546875 0l0 -9.671875l2.375 0l0 1.421875q1.265625 -1.640625 3.1875 -1.640625q0.84375 0 1.546875 0.3125q0.703125 0.296875 1.0625 0.78125q0.359375 0.46875 0.5 1.078125q0.140625 0.59375 0.140625 1.703125l0 6.015625zm7.8137207 0l0 -13.359375l9.15625 0l0 2.265625l-6.453125 0l0 3.15625l5.5625 0l0 2.265625l-5.5625 0l0 5.671875l-2.703125 0zm11.364746 0l0 -13.359375l2.5625 0l0 13.359375l-2.5625 0zm4.5894775 -4.96875q0 -1.28125 0.625 -2.46875q0.625 -1.203125 1.78125 -1.828125q1.15625 -0.625 2.578125 -0.625q2.1875 0 3.59375 1.421875q1.40625 1.421875 1.40625 3.609375q0 2.1875 -1.421875 3.640625q-1.421875 1.4375 -3.5625 1.4375q-1.328125 0 -2.546875 -0.59375q-1.203125 -0.609375 -1.828125 -1.765625q-0.625 -1.171875 -0.625 -2.828125zm2.625 0.125q0 1.453125 0.671875 2.21875q0.6875 0.75 1.6875 0.75q1.0 0 1.671875 -0.75q0.6875 -0.765625 0.6875 -2.234375q0 -1.421875 -0.6875 -2.1875q-0.671875 -0.765625 -1.671875 -0.765625q-1.0 0 -1.6875 0.765625q-0.671875 0.765625 -0.671875 2.203125zm11.161621 4.84375l-3.0625 -9.671875l2.484375 0l1.8125 6.34375l1.671875 -6.34375l2.46875 0l1.609375 6.34375l1.859375 -6.34375l2.515625 0l-3.109375 9.671875l-2.453125 0l-1.671875 -6.21875l-1.640625 6.21875l-2.484375 0zm11.80835 -2.765625l2.5625 -0.390625q0.171875 0.75 0.671875 1.140625q0.5 0.390625 1.40625 0.390625q0.984375 0 1.484375 -0.375q0.34375 -0.25 0.34375 -0.671875q0 -0.296875 -0.1875 -0.484375q-0.1875 -0.1875 -0.859375 -0.34375q-3.09375 -0.6875 -3.921875 -1.25q-1.140625 -0.78125 -1.140625 -2.171875q0 -1.265625 0.984375 -2.109375q1.0 -0.859375 3.078125 -0.859375q1.984375 0 2.953125 0.65625q0.96875 0.640625 1.328125 1.90625l-2.40625 0.4375q-0.15625 -0.5625 -0.59375 -0.859375q-0.421875 -0.296875 -1.234375 -0.296875q-1.0 0 -1.4375 0.28125q-0.296875 0.203125 -0.296875 0.515625q0 0.265625 0.25 0.46875q0.34375 0.25 2.390625 0.71875q2.046875 0.453125 2.859375 1.140625q0.796875 0.671875 0.796875 1.890625q0 1.34375 -1.109375 2.296875q-1.109375 0.953125 -3.28125 0.953125q-1.984375 0 -3.140625 -0.796875q-1.140625 -0.8125 -1.5 -2.1875z" fill-rule="nonzero"/><path fill="#e06666" d="m939.8833 346.77142l0 0c0 -3.797943 3.0787964 -6.87677 6.87677 -6.87677l278.59283 0c1.8238525 0 3.572998 0.7244873 4.862671 2.0141602c1.2896729 1.2896423 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.797943 -3.0788574 6.87677 -6.876831 6.87677l-278.59283 0c-3.7979736 0 -6.87677 -3.078827 -6.87677 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m966.4187 339.89105l239.27563 0l0 32.97638l-239.27563 0z" fill-rule="evenodd"/><path 
fill="#ffffff" d="m1008.09283 359.59607l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm10.088135 -5.640625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561035 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.60943604 -0.34375 1.359436 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015686 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.193176 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.135498 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.2404785 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 
1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2595215 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7717285 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm17.207397 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm2.651123 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2562256 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 
-0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm11.551147 3.9375l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm8.16626 8.421875l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm10.203125 -4.078125l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2595215 1.15625l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm7.666626 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 
-0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0z" fill-rule="nonzero"/><path fill="#e06666" d="m939.8833 447.19138l0 0c0 -3.797943 3.0787964 -6.8768005 6.87677 -6.8768005l278.59283 0c1.8238525 0 3.572998 0.7245178 4.862671 2.0141602c1.2896729 1.2896729 2.0141602 3.0387878 2.0141602 4.8626404l0 27.506287c0 3.797943 -3.0788574 6.87677 -6.876831 6.87677l-278.59283 0c-3.7979736 0 -6.87677 -3.078827 -6.87677 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m966.4187 440.311l239.27563 0l0 32.97638l-239.27563 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m1035.8632 463.39102l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm8.213135 5.5l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099365 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.182373 7.4375l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm11.947998 2.8125q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 
-0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.291748 3.8125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099365 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm16.207397 1.75l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm2.375 5.6875l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.994873 6.65625l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.479248 1.484375l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 
-0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2562256 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0z" fill-rule="nonzero"/><path fill="#e06666" d="m1137.4801 656.1574l0 0c0 -4.2473145 3.4431152 -7.6904297 7.6904297 -7.6904297l122.79236 0c2.0396729 0 3.9957275 0.8102417 5.4379883 2.2525024c1.4422607 1.4421997 2.2524414 3.3983154 2.2524414 5.4379272l0 30.760864c0 4.2473145 -3.4431152 7.6904297 -7.6904297 7.6904297l-122.79236 0l0 0c-4.2473145 0 -7.6904297 -3.4431152 -7.6904297 -7.6904297z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m1141.1812 644.96716l130.7716 0l0 36.472412l-130.7716 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m1162.0844 668.0472l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm8.213135 5.5l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099365 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.182373 7.4375l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 
-0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm11.947998 2.8125q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.291748 3.8125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099365 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm11.23877 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm7.70874 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2281494 1.15625l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm2.7718506 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 
-0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561035 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0z" fill-rule="nonzero"/><path fill="#ffffff" d="m1159.8834 689.1253q-1.0625 -1.34375 -1.796875 -3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm2.4276123 -3.078125l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.666626 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm12.229248 3.796875l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 
-2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm12.494873 1.34375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm6.682373 1.375l0 -1.28125l3.953125 0l0 1.28125l-3.953125 0zm6.568115 3.15625l-1.203125 0l0 -10.484375l1.296875 0l0 3.734375q0.8125 -1.015625 2.078125 -1.015625q0.703125 0 1.328125 0.28125q0.625 0.28125 1.03125 0.796875q0.40625 0.5 0.625 1.234375q0.234375 0.71875 0.234375 1.53125q0 1.96875 -0.96875 3.03125q-0.953125 1.0625 -2.3125 1.0625q-1.34375 0 -2.109375 -1.125l0 0.953125zm-0.015625 -3.859375q0 1.375 0.375 1.984375q0.609375 0.984375 1.640625 0.984375q0.84375 0 1.453125 -0.734375q0.625 -0.734375 0.625 -2.1875q0 -1.484375 -0.59375 -2.1875q-0.59375 -0.71875 -1.421875 -0.71875q-0.84375 0 -1.46875 0.734375q-0.609375 0.734375 -0.609375 2.125zm6.916748 6.78125l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm6.890625 -6.078125l0 -1.28125l3.953125 0l0 1.28125l-3.953125 0zm5.380615 3.15625l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.666748 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm12.229248 3.796875l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm12.494873 
1.34375l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm8.026123 7.609375l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625z" fill-rule="nonzero"/><path fill="#e06666" d="m897.4632 655.9718l0 0c0 -4.2473145 3.4431152 -7.6904297 7.6904297 -7.6904297l122.79242 0c2.0395508 0 3.9957275 0.8102417 5.437866 2.2525024c1.4422607 1.4421997 2.2525635 3.3983154 2.2525635 5.4379272l0 30.760864c0 4.2473145 -3.4431152 7.6904297 -7.6904297 7.6904297l-122.79242 0l0 0c-4.2473145 0 -7.6904297 -3.4431152 -7.6904297 -7.6904297z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m901.164 645.1514l130.77167 0l0 40.503967l-130.77167 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m922.06726 668.23145l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm8.213074 5.5l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099426 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.182373 7.4375l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm11.947937 2.8125q-0.71875 
0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm3.291748 3.8125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm10.099426 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm11.2387085 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm7.70874 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2282104 1.15625l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm2.7717896 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 
-0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0z" fill-rule="nonzero"/><path fill="#ffffff" d="m939.8362 689.3096q-1.0625 -1.34375 -1.796875 -3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm1.9119263 -5.34375l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm10.65625 1.109375l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm6.0686646 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm6.2125854 -1.625l1.265625 0.15625q-0.203125 1.3125 
-1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm2.375 2.78125l0 -10.484375l1.28125 0l0 3.75q0.90625 -1.03125 2.28125 -1.03125q0.84375 0 1.46875 0.328125q0.625 0.328125 0.890625 0.921875q0.265625 0.578125 0.265625 1.703125l0 4.8125l-1.28125 0l0 -4.8125q0 -0.96875 -0.421875 -1.40625q-0.421875 -0.4375 -1.1875 -0.4375q-0.578125 0 -1.078125 0.296875q-0.5 0.296875 -0.71875 0.8125q-0.21875 0.5 -0.21875 1.390625l0 4.15625l-1.28125 0zm13.354248 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm12.104248 4.53125l0 -0.953125q-0.71875 1.125 -2.125 1.125q-0.90625 0 -1.671875 -0.5q-0.75 -0.5 -1.171875 -1.390625q-0.421875 -0.90625 -0.421875 -2.078125q0 -1.140625 0.375 -2.0625q0.390625 -0.921875 1.140625 -1.40625q0.765625 -0.5 1.703125 -0.5q0.6875 0 1.21875 0.296875q0.53125 0.28125 0.875 0.734375l0 -3.75l1.28125 0l0 10.484375l-1.203125 0zm-4.0625 -3.796875q0 1.46875 0.609375 2.1875q0.625 0.71875 1.453125 0.71875q0.84375 0 1.4375 -0.6875q0.59375 -0.6875 0.59375 -2.109375q0 -1.5625 -0.609375 -2.28125q-0.59375 -0.734375 -1.484375 -0.734375q-0.84375 0 -1.421875 0.703125q-0.578125 0.703125 -0.578125 2.203125zm8.135437 6.875l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625z" fill-rule="nonzero"/><path fill="#e06666" d="m939.88336 822.0936l0 0c0 -3.7979126 3.0787964 -6.87677 6.87677 -6.87677l278.5929 0c1.8238525 0 3.572998 0.7244873 4.862671 2.0141602c1.2895508 1.2896118 2.0141602 3.0387573 2.0141602 4.86261l0 27.506287c0 3.7979126 -3.0788574 6.87677 -6.876831 6.87677l-278.5929 0c-3.7979736 0 -6.87677 -3.0788574 -6.87677 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m947.6798 815.51447l277.0078 0l0 24.944824l-277.0078 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m973.3106 838.5944l0 -10.484375l7.59375 0l0 1.234375l-6.203125 0l0 3.203125l5.796875 0l0 1.234375l-5.796875 0l0 3.578125l6.4375 0l0 1.234375l-7.828125 0zm9.588135 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 
-0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.193115 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.135498 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2594604 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7717896 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm14.676147 3.078125q-1.0625 -1.34375 -1.796875 
-3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm2.4118652 -3.078125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm7.7088623 -1.15625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2281494 1.15625l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm2.7718506 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561035 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm13.05249 3.078125l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625zm13.093262 -3.078125l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 
1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm2.651123 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561035 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm11.55127 3.9375l0 -10.484375l3.96875 0q1.046875 0 1.59375 0.09375q0.765625 0.125 1.28125 0.484375q0.53125 0.359375 0.84375 1.015625q0.328125 0.65625 0.328125 1.4375q0 1.328125 -0.859375 2.265625q-0.84375 0.921875 -3.078125 0.921875l-2.6875 0l0 4.265625l-1.390625 0zm1.390625 -5.5l2.71875 0q1.34375 0 1.90625 -0.5q0.5625 -0.5 0.5625 -1.40625q0 -0.671875 -0.328125 -1.140625q-0.328125 -0.46875 -0.875 -0.609375q-0.359375 -0.09375 -1.296875 -0.09375l-2.6875 0l0 3.75zm8.16626 8.421875l-0.140625 -1.203125q0.421875 0.109375 0.734375 0.109375q0.4375 0 0.6875 -0.140625q0.265625 -0.140625 0.421875 -0.40625q0.125 -0.1875 0.390625 -0.953125q0.046875 -0.109375 0.125 -0.3125l-2.890625 -7.609375l1.390625 0l1.578125 4.390625q0.3125 0.84375 0.546875 1.765625q0.234375 -0.890625 0.53125 -1.734375l1.625 -4.421875l1.296875 0l-2.890625 7.71875q-0.46875 1.25 -0.734375 1.734375q-0.34375 0.625 -0.78125 0.921875q-0.4375 0.296875 -1.0625 0.296875q-0.375 0 -0.828125 -0.15625zm10.546875 -2.921875l-4.0625 -10.484375l1.5 
0l2.734375 7.609375q0.328125 0.921875 0.546875 1.71875q0.25 -0.859375 0.5625 -1.71875l2.84375 -7.609375l1.40625 0l-4.109375 10.484375l-1.421875 0zm11.013184 -2.453125l1.328125 0.171875q-0.3125 1.171875 -1.171875 1.8125q-0.84375 0.640625 -2.171875 0.640625q-1.671875 0 -2.65625 -1.015625q-0.96875 -1.03125 -0.96875 -2.890625q0 -1.921875 0.984375 -2.96875q1.0 -1.0625 2.578125 -1.0625q1.515625 0 2.484375 1.03125q0.96875 1.03125 0.96875 2.921875q0 0.109375 -0.015625 0.34375l-5.65625 0q0.0625 1.25 0.703125 1.921875q0.640625 0.65625 1.59375 0.65625q0.703125 0 1.203125 -0.359375q0.5 -0.375 0.796875 -1.203125zm-4.234375 -2.078125l4.25 0q-0.09375 -0.953125 -0.484375 -1.4375q-0.625 -0.75 -1.609375 -0.75q-0.875 0 -1.484375 0.59375q-0.609375 0.59375 -0.671875 1.59375zm7.166748 4.53125l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0zm4.89624 -9.015625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2248535 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.240601 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 -0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm0.7750244 -2.640625q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.291748 3.796875l0 -7.59375l1.15625 0l0 1.140625q0.453125 -0.796875 0.828125 -1.046875q0.375 -0.265625 0.8125 -0.265625q0.65625 0 1.328125 0.40625l-0.4375 
1.203125q-0.46875 -0.28125 -0.953125 -0.28125q-0.421875 0 -0.765625 0.25q-0.328125 0.25 -0.46875 0.703125q-0.21875 0.6875 -0.21875 1.5l0 3.984375l-1.28125 0z" fill-rule="nonzero"/><path fill="#e06666" d="m939.8833 545.404l0 0c0 -3.7979736 3.0787964 -6.876831 6.87677 -6.876831l278.59283 0c1.8238525 0 3.572998 0.72454834 4.862671 2.0141602c1.2896729 1.2896729 2.0141602 3.0388184 2.0141602 4.862671l0 27.506287c0 3.7979126 -3.0788574 6.87677 -6.876831 6.87677l-278.59283 0c-3.7979736 0 -6.87677 -3.0788574 -6.87677 -6.87677z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m966.4187 538.5236l239.27563 0l0 32.97638l-239.27563 0z" fill-rule="evenodd"/><path fill="#ffffff" d="m986.92786 558.22864l1.3125 -0.109375q0.09375 0.78125 0.421875 1.296875q0.34375 0.5 1.0625 0.8125q0.71875 0.3125 1.609375 0.3125q0.796875 0 1.40625 -0.234375q0.609375 -0.234375 0.90625 -0.640625q0.296875 -0.421875 0.296875 -0.90625q0 -0.5 -0.296875 -0.859375q-0.28125 -0.375 -0.9375 -0.625q-0.421875 -0.171875 -1.875 -0.515625q-1.4375 -0.34375 -2.015625 -0.65625q-0.75 -0.390625 -1.125 -0.96875q-0.359375 -0.59375 -0.359375 -1.3125q0 -0.796875 0.4375 -1.484375q0.453125 -0.6875 1.3125 -1.046875q0.875 -0.359375 1.9375 -0.359375q1.171875 0 2.0625 0.375q0.890625 0.375 1.359375 1.109375q0.484375 0.734375 0.515625 1.65625l-1.328125 0.09375q-0.109375 -1.0 -0.734375 -1.5q-0.609375 -0.515625 -1.8125 -0.515625q-1.265625 0 -1.84375 0.46875q-0.578125 0.46875 -0.578125 1.109375q0 0.5625 0.421875 0.9375q0.390625 0.359375 2.078125 0.75q1.703125 0.375 2.328125 0.65625q0.921875 0.421875 1.359375 1.078125q0.4375 0.640625 0.4375 1.484375q0 0.84375 -0.484375 1.59375q-0.484375 0.734375 -1.390625 1.140625q-0.890625 0.40625 -2.015625 0.40625q-1.421875 0 -2.390625 -0.40625q-0.953125 -0.421875 -1.5 -1.25q-0.546875 -0.828125 -0.578125 -1.890625zm10.088135 -5.640625l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561646 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm17.193115 0l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 0l0 7.59375l-1.140625 0zm3.135498 0l0 -10.484375l1.28125 0l0 10.484375l-1.28125 0zm8.24054 -0.9375q-0.71875 0.609375 -1.375 0.859375q-0.65625 0.25 -1.421875 0.25q-1.25 0 -1.921875 -0.609375q-0.671875 -0.609375 -0.671875 -1.5625q0 -0.5625 0.25 -1.015625q0.25 -0.46875 0.65625 -0.75q0.421875 -0.28125 0.9375 -0.421875q0.375 -0.09375 1.140625 -0.1875q1.5625 -0.1875 2.296875 -0.453125q0.015625 -0.265625 0.015625 -0.328125q0 -0.796875 -0.375 -1.109375q-0.484375 -0.4375 -1.453125 
-0.4375q-0.921875 0 -1.359375 0.328125q-0.421875 0.3125 -0.625 1.109375l-1.265625 -0.171875q0.171875 -0.796875 0.5625 -1.296875q0.390625 -0.5 1.140625 -0.765625q0.75 -0.265625 1.71875 -0.265625q0.984375 0 1.59375 0.234375q0.609375 0.21875 0.890625 0.5625q0.28125 0.34375 0.40625 0.875q0.0625 0.328125 0.0625 1.1875l0 1.71875q0 1.796875 0.078125 2.28125q0.078125 0.46875 0.328125 0.90625l-1.34375 0q-0.203125 -0.40625 -0.265625 -0.9375zm-0.109375 -2.875q-0.703125 0.28125 -2.09375 0.484375q-0.796875 0.109375 -1.125 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.265625 -0.171875 0.59375q0 0.515625 0.390625 0.859375q0.390625 0.34375 1.140625 0.34375q0.734375 0 1.3125 -0.3125q0.59375 -0.328125 0.859375 -0.890625q0.203125 -0.4375 0.203125 -1.296875l0 -0.46875zm6.119873 2.65625l0.1875 1.140625q-0.546875 0.109375 -0.984375 0.109375q-0.6875 0 -1.078125 -0.21875q-0.390625 -0.21875 -0.546875 -0.578125q-0.15625 -0.359375 -0.15625 -1.515625l0 -4.375l-0.953125 0l0 -1.0l0.953125 0l0 -1.890625l1.28125 -0.765625l0 2.65625l1.296875 0l0 1.0l-1.296875 0l0 4.4375q0 0.546875 0.0625 0.71875q0.078125 0.15625 0.21875 0.25q0.15625 0.078125 0.453125 0.078125q0.203125 0 0.5625 -0.046875zm1.2593994 -7.859375l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm2.7718506 -3.796875q0 -2.109375 1.171875 -3.125q0.984375 -0.84375 2.390625 -0.84375q1.578125 0 2.5625 1.03125q1.0 1.015625 1.0 2.828125q0 1.46875 -0.4375 2.3125q-0.4375 0.828125 -1.28125 1.296875q-0.84375 0.46875 -1.84375 0.46875q-1.59375 0 -2.578125 -1.015625q-0.984375 -1.03125 -0.984375 -2.953125zm1.328125 0q0 1.453125 0.625 2.1875q0.640625 0.71875 1.609375 0.71875q0.96875 0 1.59375 -0.71875q0.640625 -0.734375 0.640625 -2.234375q0 -1.40625 -0.640625 -2.125q-0.640625 -0.734375 -1.59375 -0.734375q-0.96875 0 -1.609375 0.71875q-0.625 0.71875 -0.625 2.1875zm7.307373 3.796875l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm14.676147 3.078125q-1.0625 -1.34375 -1.796875 -3.140625q-0.734375 -1.8125 -0.734375 -3.734375q0 -1.703125 0.546875 -3.265625q0.640625 -1.8125 1.984375 -3.609375l0.921875 0q-0.859375 1.484375 -1.140625 2.125q-0.4375 0.984375 -0.6875 2.0625q-0.296875 1.34375 -0.296875 2.6875q0 3.453125 2.125 6.875l-0.921875 0zm7.380615 -5.859375l1.265625 0.15625q-0.203125 1.3125 -1.0625 2.0625q-0.84375 0.734375 -2.09375 0.734375q-1.5625 0 -2.515625 -1.015625q-0.9375 -1.03125 -0.9375 -2.921875q0 -1.234375 0.40625 -2.15625q0.40625 -0.921875 1.234375 -1.375q0.84375 -0.46875 1.8125 -0.46875q1.25 0 2.03125 0.625q0.78125 0.625 1.015625 1.765625l-1.265625 0.203125q-0.171875 -0.765625 -0.625 -1.15625q-0.453125 -0.390625 -1.09375 -0.390625q-0.984375 0 -1.59375 0.703125q-0.609375 0.703125 -0.609375 2.203125q0 1.53125 0.578125 2.234375q0.59375 0.6875 1.546875 0.6875q0.75 0 1.265625 -0.453125q0.515625 -0.46875 0.640625 -1.4375zm2.375 5.6875l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 
3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.994873 6.65625l0 -10.5l1.171875 0l0 0.984375q0.421875 -0.578125 0.9375 -0.859375q0.515625 -0.296875 1.265625 -0.296875q0.96875 0 1.71875 0.5q0.75 0.5 1.125 1.421875q0.375 0.90625 0.375 1.984375q0 1.171875 -0.421875 2.109375q-0.40625 0.921875 -1.21875 1.421875q-0.796875 0.5 -1.671875 0.5q-0.640625 0 -1.15625 -0.265625q-0.515625 -0.28125 -0.84375 -0.6875l0 3.6875l-1.28125 0zm1.15625 -6.65625q0 1.453125 0.59375 2.15625q0.609375 0.703125 1.453125 0.703125q0.859375 0 1.46875 -0.71875q0.609375 -0.734375 0.609375 -2.25q0 -1.453125 -0.609375 -2.171875q-0.59375 -0.734375 -1.421875 -0.734375q-0.8125 0 -1.453125 0.78125q-0.640625 0.765625 -0.640625 2.234375zm6.479248 1.484375l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2561035 0l0 -7.59375l1.15625 0l0 1.0625q0.34375 -0.5625 0.9375 -0.890625q0.609375 -0.34375 1.359375 -0.34375q0.84375 0 1.375 0.34375q0.546875 0.34375 0.765625 0.984375q0.90625 -1.328125 2.359375 -1.328125q1.125 0 1.734375 0.625q0.609375 0.625 0.609375 1.921875l0 5.21875l-1.28125 0l0 -4.78125q0 -0.78125 -0.125 -1.109375q-0.125 -0.34375 -0.453125 -0.546875q-0.328125 -0.21875 -0.78125 -0.21875q-0.796875 0 -1.328125 0.53125q-0.53125 0.53125 -0.53125 1.703125l0 4.421875l-1.28125 0l0 -4.9375q0 -0.859375 -0.3125 -1.28125q-0.3125 -0.4375 -1.03125 -0.4375q-0.546875 0 -1.015625 0.296875q-0.453125 0.28125 -0.671875 0.828125q-0.203125 0.546875 -0.203125 1.59375l0 3.9375l-1.28125 0zm13.052612 3.078125l-0.921875 0q2.140625 -3.421875 2.140625 -6.875q0 -1.34375 -0.3125 -2.671875q-0.25 -1.0625 -0.671875 -2.046875q-0.28125 -0.65625 -1.15625 -2.15625l0.921875 0q1.34375 1.796875 1.984375 3.609375q0.546875 1.5625 0.546875 3.265625q0 1.921875 -0.734375 3.734375q-0.734375 1.796875 -1.796875 3.140625zm13.093262 -3.078125l0 -1.109375q-0.890625 1.28125 -2.421875 1.28125q-0.671875 0 -1.25 -0.25q-0.578125 -0.265625 -0.875 -0.65625q-0.28125 -0.390625 -0.390625 -0.953125q-0.078125 -0.375 -0.078125 -1.203125l0 -4.703125l1.28125 0l0 4.203125q0 1.015625 0.078125 1.359375q0.125 0.515625 0.515625 0.8125q0.40625 0.28125 0.984375 0.28125q0.578125 0 1.078125 -0.296875q0.515625 -0.296875 0.71875 -0.8125q0.21875 -0.515625 0.21875 -1.484375l0 -4.0625l1.28125 
0l0 7.59375l-1.140625 0zm2.651001 -2.265625l1.265625 -0.203125q0.109375 0.765625 0.59375 1.171875q0.5 0.40625 1.375 0.40625q0.890625 0 1.3125 -0.359375q0.4375 -0.359375 0.4375 -0.84375q0 -0.4375 -0.375 -0.6875q-0.265625 -0.171875 -1.3125 -0.4375q-1.421875 -0.359375 -1.96875 -0.609375q-0.546875 -0.265625 -0.828125 -0.734375q-0.28125 -0.46875 -0.28125 -1.015625q0 -0.515625 0.21875 -0.9375q0.234375 -0.4375 0.640625 -0.734375q0.296875 -0.21875 0.8125 -0.359375q0.53125 -0.15625 1.125 -0.15625q0.890625 0 1.5625 0.265625q0.671875 0.25 1.0 0.6875q0.328125 0.4375 0.4375 1.171875l-1.25 0.171875q-0.09375 -0.578125 -0.5 -0.90625q-0.40625 -0.34375 -1.15625 -0.34375q-0.890625 0 -1.28125 0.296875q-0.375 0.296875 -0.375 0.6875q0 0.25 0.15625 0.453125q0.15625 0.203125 0.5 0.34375q0.1875 0.078125 1.140625 0.328125q1.359375 0.359375 1.890625 0.59375q0.546875 0.234375 0.859375 0.6875q0.3125 0.4375 0.3125 1.09375q0 0.640625 -0.375 1.21875q-0.375 0.5625 -1.09375 0.875q-0.703125 0.3125 -1.59375 0.3125q-1.484375 0 -2.265625 -0.609375q-0.765625 -0.625 -0.984375 -1.828125zm7.84375 -6.75l0 -1.46875l1.296875 0l0 1.46875l-1.296875 0zm0 9.015625l0 -7.59375l1.296875 0l0 7.59375l-1.296875 0zm3.2562256 0l0 -7.59375l1.15625 0l0 1.078125q0.84375 -1.25 2.421875 -1.25q0.6875 0 1.265625 0.25q0.578125 0.234375 0.859375 0.640625q0.28125 0.40625 0.40625 0.953125q0.0625 0.359375 0.0625 1.25l0 4.671875l-1.28125 0l0 -4.625q0 -0.78125 -0.15625 -1.171875q-0.15625 -0.390625 -0.546875 -0.625q-0.375 -0.234375 -0.890625 -0.234375q-0.8125 0 -1.421875 0.53125q-0.59375 0.515625 -0.59375 1.96875l0 4.15625l-1.28125 0zm7.916748 0.625l1.25 0.1875q0.078125 0.578125 0.4375 0.84375q0.46875 0.359375 1.3125 0.359375q0.890625 0 1.375 -0.359375q0.484375 -0.359375 0.65625 -1.0q0.109375 -0.390625 0.09375 -1.65625q-0.84375 1.0 -2.109375 1.0q-1.5625 0 -2.421875 -1.125q-0.859375 -1.140625 -0.859375 -2.71875q0 -1.09375 0.390625 -2.0q0.40625 -0.921875 1.140625 -1.421875q0.75 -0.5 1.765625 -0.5q1.34375 0 2.21875 1.078125l0 -0.90625l1.1875 0l0 6.5625q0 1.78125 -0.359375 2.515625q-0.359375 0.734375 -1.15625 1.15625q-0.78125 0.4375 -1.921875 0.4375q-1.359375 0 -2.203125 -0.609375q-0.828125 -0.609375 -0.796875 -1.84375zm1.0625 -4.5625q0 1.5 0.59375 2.1875q0.59375 0.6875 1.484375 0.6875q0.890625 0 1.484375 -0.6875q0.609375 -0.6875 0.609375 -2.140625q0 -1.390625 -0.625 -2.09375q-0.609375 -0.71875 -1.484375 -0.71875q-0.859375 0 -1.46875 0.703125q-0.59375 0.6875 -0.59375 2.0625zm19.035522 0.265625l1.390625 0.34375q-0.4375 1.703125 -1.578125 2.609375q-1.125 0.890625 -2.765625 0.890625q-1.6875 0 -2.75 -0.6875q-1.0625 -0.6875 -1.625 -2.0q-0.546875 -1.3125 -0.546875 -2.8125q0 -1.640625 0.625 -2.859375q0.625 -1.21875 1.78125 -1.84375q1.15625 -0.640625 2.546875 -0.640625q1.5625 0 2.640625 0.8125q1.078125 0.796875 1.5 2.25l-1.375 0.3125q-0.359375 -1.140625 -1.0625 -1.65625q-0.6875 -0.53125 -1.734375 -0.53125q-1.21875 0 -2.03125 0.578125q-0.8125 0.578125 -1.140625 1.5625q-0.328125 0.96875 -0.328125 2.015625q0 1.328125 0.390625 2.328125q0.390625 1.0 1.21875 1.5q0.828125 0.484375 1.78125 0.484375q1.171875 0 1.96875 -0.671875q0.8125 -0.671875 1.09375 -1.984375zm5.6468506 1.96875l0 -2.875l-2.859375 0l0 -1.203125l2.859375 0l0 -2.84375l1.21875 0l0 2.84375l2.859375 0l0 1.203125l-2.859375 0l0 2.875l-1.21875 0zm8.55896 0l0 -2.875l-2.859375 0l0 -1.203125l2.859375 0l0 -2.84375l1.21875 0l0 2.84375l2.859375 0l0 1.203125l-2.859375 0l0 2.875l-1.21875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m279.025 12.011797l0.03149414 45.66929" fill-rule="evenodd"/><path 
stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.02502 12.011797l0.02734375 39.66929" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.40063 51.682228l1.6548767 4.536957l1.6485901 -4.539234z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 98.93976l0 57.259842" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 98.93976l0 51.259842" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 150.1996l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 196.99619l0 57.259842" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 196.99619l0 51.259842" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 248.25603l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.04074 295.2075l0 57.259827" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.04077 295.20746l0 51.259857" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.38904 346.46732l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 393.41876l0 57.259857" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 393.41876l0 51.259827" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 444.6786l1.6517334 4.5381165l1.6517334 -4.5381165z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.04074 491.62872l0 57.259827" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.04077 491.62872l0 51.259827" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.38904 542.88855l1.6517334 4.538147l1.6517334 -4.538147z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 589.8413l0 57.259888" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 589.8413l0 51.259888" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 641.1012l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.0565 687.89777l0 57.259827" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.05652 687.89777l0 51.259888" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.4048 739.15765l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m160.92908 785.225l0 122.17322" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m160.92908 785.225l0 116.17322" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m159.27734 901.3982l1.6517334 4.538086l1.6517334 -4.538086z" 
fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m401.9462 785.6417l0 121.3858" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m401.9462 785.6417l0 115.385864" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m400.29446 901.0276l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m161.2598 956.2971l-0.6614227 66.42523" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m161.2598 956.2972l-0.6016846 60.425537" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m159.00647 1016.70624l1.6064606 4.5543213l1.6968384 -4.5214844z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m401.94745 954.78796l-0.6614075 66.42517" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m401.94748 954.78796l-0.6016846 60.425537" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m399.69415 1015.197l1.6064453 4.5543213l1.6968384 -4.5214233z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m158.72704 988.7546l29.354324 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m162.51703 988.7546l21.774338 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m159.22704 988.7546c0 -0.9085083 0.736496 -1.6449585 1.6450043 -1.6449585c0.9085083 0 1.644989 0.7364502 1.644989 1.6449585c0 0.9085083 -0.7364807 1.6450195 -1.644989 1.6450195c-0.9085083 0 -1.6450043 -0.73651123 -1.6450043 -1.6450195z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m187.58136 988.7546c0 0.9085083 -0.7364807 1.6450195 -1.644989 1.6450195c-0.9085083 0 -1.6450043 -0.73651123 -1.6450043 -1.6450195c0 -0.9085083 0.736496 -1.6449585 1.6450043 -1.6449585c0.9085083 0 1.644989 0.7364502 1.644989 1.6449585z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m158.73753 846.3123l29.354324 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m162.52753 846.3123l21.774338 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m159.23753 846.3123c0 -0.9085083 0.736496 -1.6449585 1.6450043 -1.6449585c0.9085083 0 1.644989 0.7364502 1.644989 1.6449585c0 0.9085083 -0.7364807 1.6450195 -1.644989 1.6450195c-0.9085083 0 -1.6450043 -0.73651123 -1.6450043 -1.6450195z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m187.59186 846.3123c0 0.9085083 -0.7364807 1.6450195 -1.644989 1.6450195c-0.9085083 0 -1.6450043 -0.73651123 -1.6450043 -1.6450195c0 -0.9085083 0.736496 -1.6449585 1.6450043 -1.6449585c0.9085083 0 1.644989 0.7364502 1.644989 1.6449585z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m370.0328 846.7218l33.984253 0.03149414" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m373.82278 846.72534l26.404297 0.024414062" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m370.5328 
846.7222c8.544922E-4 -0.9085083 0.7380066 -1.6442871 1.6465149 -1.6434326c0.9085083 7.9345703E-4 1.6443176 0.7379761 1.6434937 1.6464844c-8.544922E-4 0.9085083 -0.7380371 1.6443481 -1.6465454 1.6434937c-0.9085083 -8.544922E-4 -1.6443176 -0.7380371 -1.6434631 -1.6465454z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m403.51706 846.7528c-8.544922E-4 0.9085083 -0.7380066 1.6443481 -1.6465149 1.6434937c-0.9085083 -8.544922E-4 -1.6443176 -0.7380371 -1.6434937 -1.6465454c8.544922E-4 -0.9085083 0.7380371 -1.6442871 1.6465454 -1.6434326c0.9085083 7.9345703E-4 1.6443176 0.7379761 1.6434631 1.6464844z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m399.25198 990.2651l70.2677 -0.7558594" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m403.04175 990.2243l62.68817 -0.67437744" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m399.75195 990.2597c-0.009796143 -0.90844727 0.71875 -1.652832 1.6271973 -1.6625977c0.90844727 -0.009765625 1.652832 0.71875 1.6625977 1.6271973c0.009765625 0.90844727 -0.71875 1.652832 -1.6271973 1.6625977c-0.9084778 0.009765625 -1.652832 -0.71875 -1.6625977 -1.6271973z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.0197 989.5146c0.009765625 0.90844727 -0.71875 1.652832 -1.6271973 1.6625977c-0.9084778 0.009765625 -1.652832 -0.71875 -1.6625977 -1.6272583c-0.009796143 -0.90844727 0.71875 -1.652771 1.6271973 -1.6625977c0.90844727 -0.009765625 1.652832 0.71875 1.6625977 1.6272583z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5118 715.9921l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.30176 716.0126l185.42798 0.9984741" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0118 715.9948c0.0048828125 -0.9085083 0.7453308 -1.6409912 1.6538391 -1.6361084c0.9084778 0.0048828125 1.6409912 0.7453613 1.6361084 1.6538086c-0.0048828125 0.9085083 -0.7453308 1.6410522 -1.6538391 1.6361694c-0.9084778 -0.0049438477 -1.6410217 -0.7453613 -1.6361084 -1.6538696z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01968 717.0288c-0.0048828125 0.9085083 -0.7453308 1.6409912 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6409912 -0.7453613 -1.6361084 -1.6538086c0.0048828125 -0.9085083 0.7453308 -1.6410522 1.6538391 -1.6361694c0.9085083 0.0049438477 1.6410217 0.7453613 1.6361084 1.6538696z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 617.9488l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 617.96924l185.42798 0.99853516" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 617.95154c0.0048828125 -0.9085083 0.7453308 -1.6410522 1.6538391 -1.6361694c0.9084778 0.0049438477 1.6409912 0.7453613 1.6361084 1.6538696c-0.0048828125 0.9085083 -0.7453308 1.6409912 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453613 -1.6361084 -1.6538086z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 
618.9855c-0.0048828125 0.9085083 -0.7453308 1.6410522 -1.6538391 1.6361694c-0.9084778 -0.0049438477 -1.6409912 -0.7453613 -1.6361084 -1.6538696c0.0048828125 -0.9085083 0.7453308 -1.6409912 1.6538391 -1.6361084c0.9085083 0.0048828125 1.6410217 0.7453613 1.6361084 1.6538086z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 519.7402l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 519.76056l185.42798 0.99853516" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 519.74286c0.0048828125 -0.9085083 0.7453308 -1.6409912 1.6538391 -1.6361084c0.9084778 0.0048828125 1.6409912 0.7453003 1.6361084 1.6538086c-0.0048828125 0.9085083 -0.7453308 1.6409912 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453003 -1.6361084 -1.6538086z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 520.77686c-0.0048828125 0.90844727 -0.7453308 1.6409912 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6409912 -0.7453613 -1.6361084 -1.6538696c0.0048828125 -0.90844727 0.7453308 -1.6409912 1.6538391 -1.6361084c0.9085083 0.0048828125 1.6410217 0.7453613 1.6361084 1.6538696z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 420.19028l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 420.21072l185.42798 0.99850464" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 420.193c0.0048828125 -0.9085083 0.7453308 -1.6410217 1.6538391 -1.6361389c0.9084778 0.00491333 1.6409912 0.7453613 1.6361084 1.6538391c-0.0048828125 0.9085083 -0.7453308 1.6410217 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453308 -1.6361084 -1.6538086z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 421.22696c-0.0048828125 0.9085083 -0.7453308 1.6410217 -1.6538391 1.6361389c-0.9084778 -0.00491333 -1.6409912 -0.7453613 -1.6361084 -1.6538391c0.0048828125 -0.9085083 0.7453308 -1.6410217 1.6538391 -1.6361389c0.9085083 0.00491333 1.6410217 0.7453613 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 317.64172l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 317.66214l185.42798 0.9985657" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 317.64444c0.0048828125 -0.9085083 0.7453308 -1.6410217 1.6538391 -1.6361389c0.9084778 0.00491333 1.6409912 0.7453308 1.6361084 1.6538391c-0.0048828125 0.9085083 -0.7453308 1.6410217 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453308 -1.6361084 -1.6538086z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 318.6784c-0.0048828125 0.9085083 -0.7453308 1.6410217 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6409912 -0.7453308 -1.6361084 -1.6538086c0.0048828125 -0.9085083 0.7453308 -1.6410217 1.6538391 -1.6361389c0.9085083 0.00491333 1.6410217 0.7453613 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 225.1063l193.00787 
1.0393829" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 225.12671l185.42798 0.9985504" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 225.109c0.0048828125 -0.90849304 0.7453308 -1.6410065 1.6538391 -1.6361237c0.9084778 0.0048980713 1.6409912 0.74534607 1.6361084 1.6538391c-0.0048828125 0.90849304 -0.7453308 1.6410065 -1.6538391 1.6361237c-0.9084778 -0.0048980713 -1.6410217 -0.74534607 -1.6361084 -1.6538391z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 226.14297c-0.0048828125 0.90849304 -0.7453308 1.6410065 -1.6538391 1.6361237c-0.9084778 -0.0048980713 -1.6409912 -0.74534607 -1.6361084 -1.6538391c0.0048828125 -0.90849304 0.7453308 -1.6410065 1.6538391 -1.6361237c0.9085083 0.0048980713 1.6410217 0.74534607 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 130.81102l193.00787 1.0393677" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 130.83144l185.42798 0.9985504" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 130.81372c0.0048828125 -0.90849304 0.7453308 -1.6410217 1.6538391 -1.6361237c0.9084778 0.0048980713 1.6409912 0.74534607 1.6361084 1.6538391c-0.0048828125 0.90849304 -0.7453308 1.6410065 -1.6538391 1.6361084c-0.9084778 -0.0048828125 -1.6410217 -0.7453308 -1.6361084 -1.6538239z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 131.8477c-0.0048828125 0.90849304 -0.7453308 1.6410065 -1.6538391 1.6361237c-0.9084778 -0.0048980713 -1.6409912 -0.74534607 -1.6361084 -1.6538391c0.0048828125 -0.90849304 0.7453308 -1.6410065 1.6538391 -1.6361237c0.9085083 0.0048980713 1.6410217 0.74534607 1.6361084 1.6538391z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m276.5092 37.1916l193.00787 1.0393715" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="4.0,3.0" d="m280.29913 37.212013l185.42798 0.9985466" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m277.0092 37.194294c0.0048828125 -0.90849686 0.7453308 -1.6410103 1.6538391 -1.6361198c0.9084778 0.0048942566 1.6409912 0.74534225 1.6361084 1.6538353c-0.0048828125 0.90849686 -0.7453308 1.6410103 -1.6538391 1.6361198c-0.9084778 -0.0048942566 -1.6410217 -0.74534225 -1.6361084 -1.6538353z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m469.01706 38.22828c-0.0048828125 0.90849304 -0.7453308 1.6410103 -1.6538391 1.636116c-0.9084778 -0.004890442 -1.6409912 -0.74533844 -1.6361084 -1.6538315c0.0048828125 -0.90849686 0.7453308 -1.6410103 1.6538391 -1.6361198c0.9085083 0.0048942566 1.6410217 0.74533844 1.6361084 1.6538353z" fill-rule="nonzero"/><path fill="#ffffff" d="m1275.6536 486.28873l174.01575 0l0 49.637787l-174.01575 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m1275.6536 486.28873l174.01575 0l0 49.637787l-174.01575 0z" fill-rule="evenodd"/><path fill="#000000" d="m1290.8724 509.3687l0 -10.484375l1.4375 0l5.5 8.234375l0 -8.234375l1.328125 0l0 10.484375l-1.421875 0l-5.5 -8.25l0 
[... remaining SVG vector path data (rendered text labels and connector shapes) elided ...]</g></svg>
diff --git a/notebooks/end2end_example/tfc_end2end_example.ipynb b/notebooks/end2end_example/tfc_end2end_example.ipynb
index 6399d85ebdec74374115380d6e81a1228f54b0da..a067c6f6f8af1ef9e26384e1b2d92458c93b97fb 100644
--- a/notebooks/end2end_example/tfc_end2end_example.ipynb
+++ b/notebooks/end2end_example/tfc_end2end_example.ipynb
@@ -61,8 +61,8 @@
     "-------------\n",
     "1. [Brevitas export](#brev_exp)\n",
     "2. [Network preparation](#nw_prep)\n",
-    "3. [Vivado HLS and IPI](#vivado)\n",
-    "4. [PYNQ hardware generation and deployment](#hw_test)"
+    "3. [Hardware build](#vivado)\n",
+    "4. [PYNQ deployment](#hw_test)"
    ]
   },
   {
@@ -70,7 +70,7 @@
    "metadata": {},
    "source": [
     "## 1. Brevitas export <a id='brev_exp'></a>\n",
-    "FINN expects an ONNX model as input. This can be a model trained with [Brevitas](https://github.com/Xilinx/brevitas). Brevitas is a PyTorch library for quantization-aware training and the FINN Docker image comes with several [example Brevitas networks](https://github.com/maltanar/brevitas_cnv_lfc). To show the FINN end-to-end flow, we'll use the TFC-w1a1 model as example network.\n",
+    "FINN expects an ONNX model as input. This can be a model trained with [Brevitas](https://github.com/Xilinx/brevitas). Brevitas is a PyTorch library for quantization-aware training and the FINN Docker image comes with several [example Brevitas networks](https://github.com/Xilinx/brevitas/tree/master/brevitas_examples/bnn_pynq). To show the FINN end-to-end flow, we'll use the TFC-w1a1 model as example network.\n",
     "\n",
     "First a few things have to be imported. Then the model can be loaded with the pretrained weights."
    ]
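
The export cell itself is not changed by this diff, but for context, the Brevitas-to-FINN-ONNX export that produces the model used below looks roughly like the following sketch. The helper name `get_test_model_trained` and the `(1, 1, 28, 28)` input shape are assumptions based on the BNN-PYNQ TFC example and are not taken from this diff; only `bo.export_finn_onnx` appears verbatim later in the notebook.

    import brevitas.onnx as bo
    from finn.util.test import get_test_model_trained  # assumed helper for pretrained BNN-PYNQ models

    build_dir = "/workspace/finn"
    # load the pretrained TFC-w1a1 model (1-bit weights, 1-bit activations)
    tfc = get_test_model_trained("TFC", 1, 1)
    # export to FINN-ONNX; the MNIST-style input shape is an assumption
    bo.export_finn_onnx(tfc, (1, 1, 28, 28), build_dir + "/tfc_w1_a1.onnx")
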
@@ -84,8 +84,10 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "/workspace/brevitas_cnv_lfc/training_scripts/models/TFC.py:85: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect.\n",
-      "  x = 2.0 * x - torch.tensor([1.0]).to(self.device)\n"
+      "Downloading: \"https://github.com/Xilinx/brevitas/releases/download/bnn_pynq-r1/tfc_1w1a-45185b4d.pth\" to /home/maltanar/.cache/torch/checkpoints/tfc_1w1a-45185b4d.pth\n",
+      "100%|██████████| 249073/249073 [00:00<00:00, 767315.58it/s]\n",
+      "/workspace/brevitas/brevitas_examples/bnn_pynq/models/FC.py:84: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect.\n",
+      "  x = 2.0 * x - torch.tensor([1.0], device=x.device)\n"
      ]
     }
    ],
@@ -132,7 +134,7 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe1ad0b6e80>"
+       "<IPython.lib.display.IFrame at 0x7fe30c65e828>"
       ]
      },
      "execution_count": 3,
@@ -219,7 +221,8 @@
     "* GiveReadableTensorNames\n",
     "* InferShapes\n",
     "* InferDataTypes\n",
-    "* FoldConstants"
+    "* FoldConstants\n",
+    "* RemoveStaticGraphInputs"
    ]
   },
   {
@@ -228,7 +231,7 @@
    "source": [
     "In the first two transformations (`GiveUniqueNodeNames`, `GiveReadableTensorNames`) the nodes in the graph are first given unique (by enumeration) names, then the tensors are given human-readable names (based on the node names). The following two transformations (`InferShapes`, `InferDataTypes`) derive the shapes and data types of the tensors from the model properties and set them in the `ValueInfo` of the model. These transformations can almost always be applied without negative effects and do not affect the structure of the graph, ensuring that all the information needed is available.\n",
     "\n",
-    "The last listed transformation is `FoldConstants`, which performs constant folding. It identifies a node with constant inputs and determines its output. The result is then set as constant-only inputs for the following node and the old node is removed. Although this transformation changes the structure of the model, it is a transformation that is usually always desired and can be applied to any model."
+    "The next listed transformation is `FoldConstants`, which performs constant folding. It identifies a node with constant inputs and determines its output. The result is then set as constant-only inputs for the following node and the old node is removed. Although this transformation changes the structure of the model, it is a transformation that is usually always desired and can be applied to any model. And finally, we have `RemoveStaticGraphInputs` to remove any top-level graph inputs that already have ONNX initializers associated with them. "
    ]
   },
   {
@@ -240,11 +243,11 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [],
    "source": [
-    "from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames\n",
+    "from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames, RemoveStaticGraphInputs\n",
     "from finn.transformation.infer_shapes import InferShapes\n",
     "from finn.transformation.infer_datatypes import InferDataTypes\n",
     "from finn.transformation.fold_constants import FoldConstants\n",
@@ -254,6 +257,7 @@
     "model = model.transform(GiveUniqueNodeNames())\n",
     "model = model.transform(GiveReadableTensorNames())\n",
     "model = model.transform(InferDataTypes())\n",
+    "model = model.transform(RemoveStaticGraphInputs())\n",
     "\n",
     "model.save(build_dir+\"/tfc_w1_a1_tidy.onnx\")"
    ]
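
A quick way to see the effect of the tidy-up pass above, in particular `FoldConstants` and the newly added `RemoveStaticGraphInputs`, is to compare the graph before and after the transformations. The sketch below is illustrative only; the raw export filename and the `ModelWrapper` import path are assumed from the rest of the notebook and are not part of this diff.

    from finn.core.modelwrapper import ModelWrapper  # import path assumed

    build_dir = "/workspace/finn"
    raw = ModelWrapper(build_dir + "/tfc_w1_a1.onnx")        # raw Brevitas export (filename assumed)
    tidy = ModelWrapper(build_dir + "/tfc_w1_a1_tidy.onnx")  # saved by the cell above

    # FoldConstants removes nodes whose inputs are all constants, so the node count shrinks
    print("nodes before/after tidy-up:", len(raw.graph.node), len(tidy.graph.node))
    # RemoveStaticGraphInputs drops top-level inputs that already have an initializer,
    # leaving only the true dataflow input
    print("graph inputs after tidy-up:", [inp.name for inp in tidy.graph.input])
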
@@ -267,7 +271,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [
     {
@@ -293,10 +297,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe1ad0639e8>"
+       "<IPython.lib.display.IFrame at 0x7fe2d26a7da0>"
       ]
      },
-     "execution_count": 6,
+     "execution_count": 8,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -305,6 +309,157 @@
     "showInNetron(build_dir+\"/tfc_w1_a1_tidy.onnx\")"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Adding Pre- and Postprocessing <a id='prepost'></a>\n",
+    "\n",
+    "In many cases, it's common to apply some preprocessing to the raw data in a machine learning framework prior to training. For image classification networks, this may include conversion of raw 8-bit RGB values into floating point values between 0 and 1. Similarly, at the output of the network some postprocessing may be performed during deployment, such as extracting the indices of the classifications with the largest value (top-K indices).\n",
+    "\n",
+    "In FINN, we can bake some of these pre/postprocessing operatings into the graph, and in some cases these can be highly beneficial for performance by allowing our accelerator to directly consume raw data instead of going through CPU preprocessing. \n",
+    "\n",
+    "We'll demonstrate this for our small image classification network as follows. Brevitas preprocesses BNN-PYNQ network inputs with `torchvision.transforms.ToTensor()` [prior to training](https://github.com/Xilinx/brevitas/blob/master/brevitas_examples/bnn_pynq/trainer.py#L85), which converts 8-bit RGB values into floats between 0 and 1 by dividing the input by 255. We can achieve the same effect in FINN by exporting a single-node ONNX graph for division by 255 (which already exists as `finn.util.pytorch.ToTensor` and merging this with our original model. Finally, we're going to mark our input tensor as 8-bit to let FINN know which level of precision to use."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 109,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "Stopping http://0.0.0.0:8081\n",
+      "Serving '/workspace/finn/tfc_w1_a1_with_preproc.onnx' at http://0.0.0.0:8081\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/workspace/finn/src/finn/transformation/infer_data_layouts.py:113: UserWarning: Assuming 4D input is NCHW\n",
+      "  warnings.warn(\"Assuming 4D input is NCHW\")\n"
+     ]
+    },
+    {
+     "data": {
+      "text/html": [
+       "\n",
+       "        <iframe\n",
+       "            width=\"100%\"\n",
+       "            height=\"400\"\n",
+       "            src=\"http://0.0.0.0:8081/\"\n",
+       "            frameborder=\"0\"\n",
+       "            allowfullscreen\n",
+       "        ></iframe>\n",
+       "        "
+      ],
+      "text/plain": [
+       "<IPython.lib.display.IFrame at 0x7fe264171f98>"
+      ]
+     },
+     "execution_count": 109,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from finn.util.pytorch import ToTensor\n",
+    "from finn.transformation.merge_onnx_models import MergeONNXModels\n",
+    "from finn.core.datatype import DataType\n",
+    "\n",
+    "model = ModelWrapper(build_dir+\"/tfc_w1_a1_tidy.onnx\")\n",
+    "global_inp_name = model.graph.input[0].name\n",
+    "ishape = model.get_tensor_shape(global_inp_name)\n",
+    "# preprocessing: torchvision's ToTensor divides uint8 inputs by 255\n",
+    "totensor_pyt = ToTensor()\n",
+    "chkpt_preproc_name = build_dir+\"/tfc_w1_a1_preproc.onnx\"\n",
+    "bo.export_finn_onnx(totensor_pyt, ishape, chkpt_preproc_name)\n",
+    "\n",
+    "# join preprocessing and core model\n",
+    "pre_model = ModelWrapper(chkpt_preproc_name)\n",
+    "model = model.transform(MergeONNXModels(pre_model))\n",
+    "# add input quantization annotation: UINT8 for all BNN-PYNQ models\n",
+    "global_inp_name = model.graph.input[0].name\n",
+    "model.set_tensor_datatype(global_inp_name, DataType.UINT8)\n",
+    "\n",
+    "model.save(build_dir+\"/tfc_w1_a1_with_preproc.onnx\")\n",
+    "showInNetron(build_dir+\"/tfc_w1_a1_with_preproc.onnx\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "You can observe two changes in the graph above: a `Div` node has appeared in the beginning to perform the input preprocessing, and the `global_in` tensor now has a quantization annotation to mark it as an unsigned 8-bit value.\n",
+    "\n",
+    "For the postprocessing we'll insert a TopK node for k=1 at the end of our graph. This will extract the index (class number) for the largest-valued output."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 110,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "Stopping http://0.0.0.0:8081\n",
+      "Serving '/workspace/finn/tfc_w1_a1_pre_post.onnx' at http://0.0.0.0:8081\n"
+     ]
+    },
+    {
+     "data": {
+      "text/html": [
+       "\n",
+       "        <iframe\n",
+       "            width=\"100%\"\n",
+       "            height=\"400\"\n",
+       "            src=\"http://0.0.0.0:8081/\"\n",
+       "            frameborder=\"0\"\n",
+       "            allowfullscreen\n",
+       "        ></iframe>\n",
+       "        "
+      ],
+      "text/plain": [
+       "<IPython.lib.display.IFrame at 0x7fe2640f4588>"
+      ]
+     },
+     "execution_count": 110,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from finn.transformation.insert_topk import InsertTopK\n",
+    "\n",
+    "# postprocessing: insert Top-1 node at the end\n",
+    "model = model.transform(InsertTopK(k=1))\n",
+    "chkpt_name = build_dir+\"/tfc_w1_a1_pre_post.onnx\"\n",
+    "# tidy-up again\n",
+    "model = model.transform(InferShapes())\n",
+    "model = model.transform(FoldConstants())\n",
+    "model = model.transform(GiveUniqueNodeNames())\n",
+    "model = model.transform(GiveReadableTensorNames())\n",
+    "model = model.transform(InferDataTypes())\n",
+    "model = model.transform(RemoveStaticGraphInputs())\n",
+    "model.save(chkpt_name)\n",
+    "\n",
+    "showInNetron(build_dir+\"/tfc_w1_a1_pre_post.onnx\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Notice the`TopK` node that has appeared at the end of the network. With our pre- and postprocessing in place, we can move on to the next step in the flow, which is streamlining."
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -317,7 +472,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 14,
    "metadata": {},
    "outputs": [
     {
@@ -333,9 +488,10 @@
       "            ConvertDivToMul(),\n",
       "            BatchNormToAffine(),\n",
       "            ConvertSignToThres(),\n",
+      "            AbsorbSignBiasIntoMultiThreshold(),\n",
       "            MoveAddPastMul(),\n",
       "            MoveScalarAddPastMatMul(),\n",
-      "            MoveScalarAddPastConv(),\n",
+      "            MoveAddPastConv(),\n",
       "            MoveScalarMulPastMatMul(),\n",
       "            MoveScalarMulPastConv(),\n",
       "            MoveAddPastMul(),\n",
@@ -350,6 +506,7 @@
       "        ]\n",
       "        for trn in streamline_transformations:\n",
       "            model = model.transform(trn)\n",
+      "            model = model.transform(RemoveIdentityOps())\n",
       "            model = model.transform(GiveUniqueNodeNames())\n",
       "            model = model.transform(GiveReadableTensorNames())\n",
       "            model = model.transform(InferDataTypes())\n",
@@ -374,7 +531,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 26,
    "metadata": {},
    "outputs": [
     {
@@ -400,16 +557,22 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe1346e4ef0>"
+       "<IPython.lib.display.IFrame at 0x7fe2640f4d30>"
       ]
      },
-     "execution_count": 8,
+     "execution_count": 26,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "model = ModelWrapper(build_dir+\"/tfc_w1_a1_tidy.onnx\")\n",
+    "from finn.transformation.streamline.reorder import MoveScalarLinearPastInvariants\n",
+    "import finn.transformation.streamline.absorb as absorb\n",
+    "\n",
+    "model = ModelWrapper(build_dir+\"/tfc_w1_a1_pre_post.onnx\")\n",
+    "# move initial Mul (from preproc) past the Reshape\n",
+    "model = model.transform(MoveScalarLinearPastInvariants())\n",
+    "# streamline\n",
     "model = model.transform(Streamline())\n",
     "model.save(build_dir+\"/tfc_w1_a1_streamlined.onnx\")\n",
     "showInNetron(build_dir+\"/tfc_w1_a1_streamlined.onnx\")"
@@ -428,7 +591,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 9,
+   "execution_count": 28,
    "metadata": {},
    "outputs": [
     {
@@ -454,24 +617,31 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe1346f7780>"
+       "<IPython.lib.display.IFrame at 0x7fe30c65e898>"
       ]
      },
-     "execution_count": 9,
+     "execution_count": 28,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
     "from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount\n",
-    "import finn.transformation.streamline.absorb as absorb\n",
     "from finn.transformation.streamline.round_thresholds import RoundAndClipThresholds\n",
+    "from finn.transformation.infer_data_layouts import InferDataLayouts\n",
+    "from finn.transformation.general import RemoveUnusedTensors\n",
     "\n",
     "model = model.transform(ConvertBipolarMatMulToXnorPopcount())\n",
     "model = model.transform(absorb.AbsorbAddIntoMultiThreshold())\n",
     "model = model.transform(absorb.AbsorbMulIntoMultiThreshold())\n",
+    "# absorb final add-mul nodes into TopK\n",
+    "model = model.transform(absorb.AbsorbScalarMulAddIntoTopK())\n",
     "model = model.transform(RoundAndClipThresholds())\n",
     "\n",
+    "# bit of tidy-up\n",
+    "model = model.transform(InferDataLayouts())\n",
+    "model = model.transform(RemoveUnusedTensors())\n",
+    "\n",
     "model.save(build_dir+\"/tfc_w1a1_ready_for_hls_conversion.onnx\")\n",
     "showInNetron(build_dir+\"/tfc_w1a1_ready_for_hls_conversion.onnx\")"
    ]
@@ -502,7 +672,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 29,
    "metadata": {
     "scrolled": false
    },
@@ -530,10 +700,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe1346f1080>"
+       "<IPython.lib.display.IFrame at 0x7fe30c65e748>"
       ]
      },
-     "execution_count": 10,
+     "execution_count": 29,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -542,6 +712,10 @@
     "import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls\n",
     "model = ModelWrapper(build_dir+\"/tfc_w1a1_ready_for_hls_conversion.onnx\")\n",
     "model = model.transform(to_hls.InferBinaryStreamingFCLayer(\"decoupled\"))\n",
+    "# TopK to LabelSelect\n",
+    "model = model.transform(to_hls.InferLabelSelectLayer())\n",
+    "# input quantization (if any) to standalone thresholding\n",
+    "model = model.transform(to_hls.InferThresholdingLayer())\n",
     "model.save(build_dir+\"/tfc_w1_a1_hls_layers.onnx\")\n",
     "showInNetron(build_dir+\"/tfc_w1_a1_hls_layers.onnx\")"
    ]
@@ -564,7 +738,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": 30,
    "metadata": {},
    "outputs": [
     {
@@ -590,10 +764,10 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe1ad0b6e48>"
+       "<IPython.lib.display.IFrame at 0x7fe2640abc88>"
       ]
      },
-     "execution_count": 11,
+     "execution_count": 30,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -616,7 +790,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 12,
+   "execution_count": 33,
    "metadata": {},
    "outputs": [
     {
@@ -625,7 +799,7 @@
      "text": [
       "\n",
       "Stopping http://0.0.0.0:8081\n",
-      "Serving '/tmp/finn_dev_jakobap/dataflow_partition_pbrjefjg/df_model.onnx' at http://0.0.0.0:8081\n"
+      "Serving '/tmp/finn_dev_maltanar/dataflow_partition0_q1ym9aul/df_model.onnx' at http://0.0.0.0:8081\n"
      ]
     },
     {
@@ -642,17 +816,18 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe1346f3550>"
+       "<IPython.lib.display.IFrame at 0x7fe264098f60>"
       ]
      },
-     "execution_count": 12,
+     "execution_count": 33,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
     "from finn.custom_op.registry import getCustomOp\n",
-    "sdp_node = getCustomOp(parent_model.graph.node[2])\n",
+    "sdp_node = parent_model.get_nodes_by_op_type(\"StreamingDataflowPartition\")[0]\n",
+    "sdp_node = getCustomOp(sdp_node)\n",
     "dataflow_model_filename = sdp_node.get_nodeattr(\"model\")\n",
     "showInNetron(dataflow_model_filename)"
    ]
@@ -666,7 +841,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": 34,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -677,7 +852,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Folding and Datawidth Converter, FIFO and TLastMarker Insertion <a id='folding'></a>\n",
+    "### Folding: Adjusting the Parallelism <a id='folding'></a>\n",
     "\n",
     "*Folding* in FINN describes how much a layer is time-multiplexed in terms of execution resources. There are several *folding factors* for each layer, controlled by the PE (parallelization over outputs) and SIMD (parallelization over inputs) parameters as described by the original [FINN paper](https://arxiv.org/pdf/1612.07119). The higher the PE and SIMD values are set, the faster the generated accelerator will run, and the more FPGA resources it will consume. \n",
     "\n",
@@ -693,33 +868,28 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": 35,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "CustomOp wrapper is of class StreamingFCLayer_Batch\n"
+      "CustomOp wrapper is of class Thresholding_Batch\n"
      ]
     },
     {
      "data": {
       "text/plain": [
        "{'PE': ('i', True, 0),\n",
-       " 'SIMD': ('i', True, 0),\n",
-       " 'MW': ('i', True, 0),\n",
-       " 'MH': ('i', True, 0),\n",
-       " 'resType': ('s', True, ''),\n",
-       " 'ActVal': ('i', False, 0),\n",
+       " 'NumChannels': ('i', True, 0),\n",
+       " 'ram_style': ('s', False, 'distributed'),\n",
        " 'inputDataType': ('s', True, ''),\n",
-       " 'weightDataType': ('s', True, ''),\n",
        " 'outputDataType': ('s', True, ''),\n",
-       " 'binaryXnorMode': ('i', False, 0),\n",
-       " 'noActivation': ('i', False, 0),\n",
+       " 'inFIFODepth': ('i', False, 2),\n",
+       " 'outFIFODepth': ('i', False, 2),\n",
        " 'numInputVectors': ('ints', False, [1]),\n",
-       " 'mem_mode': ('s', False, 'const'),\n",
-       " 'ram_style': ('s', False, 'auto'),\n",
+       " 'ActVal': ('i', False, 0),\n",
        " 'backend': ('s', True, 'fpgadataflow'),\n",
        " 'code_gen_dir_cppsim': ('s', False, ''),\n",
        " 'code_gen_dir_ipgen': ('s', False, ''),\n",
@@ -728,17 +898,17 @@
        " 'ip_path': ('s', False, ''),\n",
        " 'ip_vlnv': ('s', False, ''),\n",
        " 'exec_mode': ('s', False, ''),\n",
-       " 'sim_cycles': ('i', False, 0),\n",
+       " 'cycles_rtlsim': ('i', False, 0),\n",
+       " 'cycles_estimate': ('i', False, 0),\n",
        " 'rtlsim_trace': ('s', False, ''),\n",
        " 'res_estimate': ('s', False, ''),\n",
        " 'res_hls': ('s', False, ''),\n",
        " 'res_synth': ('s', False, ''),\n",
        " 'rtlsim_so': ('s', False, ''),\n",
-       " 'inFIFODepth': ('i', False, 2),\n",
-       " 'outFIFODepth': ('i', False, 2)}"
+       " 'partition_id': ('i', False, 0)}"
       ]
      },
-     "execution_count": 14,
+     "execution_count": 35,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -762,7 +932,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 41,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -780,7 +950,12 @@
     "    fcl_inst.set_nodeattr(\"SIMD\", simd)\n",
     "    fcl_inst.set_nodeattr(\"inFIFODepth\", ififo)\n",
     "    fcl_inst.set_nodeattr(\"outFIFODepth\", ofifo)\n",
-    "    fcl_inst.set_nodeattr(\"ram_style\", ramstyle)"
+    "    fcl_inst.set_nodeattr(\"ram_style\", ramstyle)\n",
+    "    \n",
+    "# set parallelism for input quantizer to be same as first layer's SIMD\n",
+    "inp_qnt_node = model.get_nodes_by_op_type(\"Thresholding_Batch\")[0]\n",
+    "inp_qnt = getCustomOp(inp_qnt_node)\n",
+    "inp_qnt.set_nodeattr(\"PE\", 49)"
    ]
   },
   {
@@ -795,36 +970,14 @@
    "metadata": {},
    "source": [
     "Besides PE and SIMD three other node attributes are set. `ram_style` specifies how the weights are to be stored (BRAM, LUTRAM, and so on). It can be selected explicitly or with the option `auto` you can let Vivado decide.\n",
-    "`inFIFODepth` and `outFIFODepth` specifies the FIFO depths that is needed by the node from the surrounding FIFOs. These attributes are used in the transformation 'InsertFIFO' to insert the appropriate FIFOs between the nodes.\n",
-    "\n",
-    "But before FIFOs can be added, it must be determined whether datawidth converters (DWC) are required and they must be inserted correctly. Because by setting the folding, the folded output shape of one node may not match the folded input shape of the next node. \n",
-    "\n",
-    "In the following, first DWCs and then FIFOs are inserted using the corresponding transformations in FINN."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from finn.transformation.fpgadataflow.insert_dwc import InsertDWC\n",
-    "from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO\n",
+    "`inFIFODepth` and `outFIFODepth` specifies the FIFO depths that is needed by the node from the surrounding FIFOs. These attributes are used in the transformation 'InsertFIFO' to insert the appropriate FIFOs between the nodes, which will be automatically called as part of the hardware build process.\n",
     "\n",
-    "model = model.transform(InsertDWC())\n",
-    "model = model.transform(InsertFIFO())"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Finally, we will run the `InsertTLastMarker` transformation to get a `TLastMarker` node at the output of this graph, which is necessary to run the DMA engines correctly. Using netron we can observe that now the nodes contain the set folding, if necessary a DWC is inserted, inbetween the nodes are FIFOs inserted and the last node is the `TLastMarker` node we insert in the following."
+    "In previous versions of FINN we had to call transformations to insert data width converters, FIFOs and `TLastMarker` manually at this step. This is no longer needed, as all this is taken care of by the `ZynqBuild` or `VitisBuild` transformations."
    ]
   },
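+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For reference, a minimal sketch of what this manual insertion used to look like in earlier versions of this notebook (shown purely for illustration; it is not needed in the current flow):\n",
+    "\n",
+    "```python\n",
+    "from finn.transformation.fpgadataflow.insert_dwc import InsertDWC\n",
+    "from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO\n",
+    "\n",
+    "# insert data width converters and FIFOs by hand -- ZynqBuild/VitisBuild now do this for us\n",
+    "model = model.transform(InsertDWC())\n",
+    "model = model.transform(InsertFIFO())\n",
+    "```"
+   ]
+  },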
   {
    "cell_type": "code",
-   "execution_count": 17,
+   "execution_count": 42,
    "metadata": {
     "scrolled": true
    },
@@ -852,17 +1005,15 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe135b84780>"
+       "<IPython.lib.display.IFrame at 0x7fe2640712e8>"
       ]
      },
-     "execution_count": 17,
+     "execution_count": 42,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker\n",
-    "model = model.transform(InsertTLastMarker())\n",
     "model.save(build_dir+\"/tfc_w1_a1_set_folding_factors.onnx\")\n",
     "showInNetron(build_dir+\"/tfc_w1_a1_set_folding_factors.onnx\")"
    ]
@@ -878,24 +1029,23 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## 3. Vivado HLS and IPI <a id='vivado'></a>\n",
-    "* [Generating HLS Code](#hls_per_layer)\n",
-    "* [Synthesizing HLS to IP Blocks](#hls_synth)\n",
-    "* [IP Stitching](#ip_stitching)\n",
+    "## 3. Hardware Build <a id='vivado'></a>\n",
+    "\n",
+    "We're finally ready to start generating hardware from our network. Depending on whether you want to target a Zynq or Alveo platform, FINN offers two transformations to build the accelerator, integrate into an appropriate shell and build a bitfile. These are `ZynqBuild` and `VitisBuild` for Zynq and Alveo, respectively. In this notebook we'll demonstrate the `ZynqBuild` as these boards are more common and it's much faster to complete bitfile generation for the smaller FPGAs found on them.\n",
     "\n",
     "As we will be dealing with FPGA synthesis tools in these tasks, we'll define two helper variables that describe the Xilinx FPGA part name and the PYNQ board name that we are targeting."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": 43,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "dict_keys(['Ultra96', 'Pynq-Z1', 'Pynq-Z2', 'ZCU104'])\n"
+      "dict_keys(['Ultra96', 'Pynq-Z1', 'Pynq-Z2', 'ZCU102', 'ZCU104'])\n"
      ]
     }
    ],
@@ -907,7 +1057,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": 44,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -921,65 +1071,88 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Generating HLS Code <a id='hls_per_layer'></a>\n",
-    "This section deals with the generation of an IP block from the different layers. These can then be stitched to a block design that corresponds to the complete model. The single conversion into IP blocks allows a good transparency and we can check the functionality of each IP block and compare it with the behaviour of the corresponding ONNX node. "
+    "In previous versions of FINN, we had to manually go through several steps to generate HLS code, stitch IP, create a PYNQ project and run synthesis. All these steps are now performed by the `ZynqBuild` transform (or the `VitisBuild` transform for Alveo). **As this involves calling HLS synthesis and Vivado synthesis, this transformation will run for some time (up to half an hour depending on your PC).**"
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": 45,
    "metadata": {},
+   "outputs": [],
    "source": [
-    "Two transformations are required to generate HLS IP blocks for each layer: \n",
-    "* `PrepareIP` which generates the HLS C++ code for the node and a tcl-script which starts the HLS synthesis and exports the design as IP. \n",
-    "* `HLSSynthIP` which passes the tcl-script to Vivado HLS and thus performs the actual IP generation. \n",
-    "\n",
-    "We start off by giving unique node names using the basic transformation `GiveUniqueNodeNames`, and then proceed with the HLS C++ code generation with `PrepareIP`."
+    "from finn.transformation.fpgadataflow.make_zynq_proj import ZynqBuild\n",
+    "model = ModelWrapper(build_dir+\"/tfc_w1_a1_set_folding_factors.onnx\")\n",
+    "model = model.transform(ZynqBuild(platform = pynq_board, period_ns = target_clk_ns))"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 20,
+   "execution_count": 46,
    "metadata": {},
    "outputs": [],
    "source": [
-    "model = ModelWrapper(build_dir+\"/tfc_w1_a1_set_folding_factors.onnx\")\n",
-    "model = model.transform(GiveUniqueNodeNames())\n",
-    "\n",
-    "from finn.transformation.fpgadataflow.prepare_ip import PrepareIP\n",
-    "model = model.transform(PrepareIP(fpga_part, target_clk_ns))"
+    "model.save(build_dir + \"/tfc_w1_a1_post_synthesis.onnx\")"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Synthesizing HLS to IP Blocks <a id='hls_synth'></a>\n",
+    "### Examining the generated outputs <a id='gen_outputs'></a>\n",
     "\n",
-    "Now that we have generated the HLS code for each layer, we can call the `HLSSynthIP` transformation to convert the generated HLS into Vivado IP blocks. **As this involves calling HLS synthesis, this transformation will run for some time (several minutes).**"
+    "Let's start by viewing the post-synthesis model in Netron:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": 99,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "Stopping http://0.0.0.0:8081\n",
+      "Serving '/workspace/finn/tfc_w1_a1_post_synthesis.onnx' at http://0.0.0.0:8081\n"
+     ]
+    },
+    {
+     "data": {
+      "text/html": [
+       "\n",
+       "        <iframe\n",
+       "            width=\"100%\"\n",
+       "            height=\"400\"\n",
+       "            src=\"http://0.0.0.0:8081/\"\n",
+       "            frameborder=\"0\"\n",
+       "            allowfullscreen\n",
+       "        ></iframe>\n",
+       "        "
+      ],
+      "text/plain": [
+       "<IPython.lib.display.IFrame at 0x7fe2ef58eb00>"
+      ]
+     },
+     "execution_count": 99,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
-    "from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP\n",
-    "\n",
-    "model = model.transform(HLSSynthIP())\n",
-    "model.save(build_dir+\"/tfc_w1_a1_ipgen.onnx\")"
+    "showInNetron(build_dir + \"/tfc_w1_a1_post_synthesis.onnx\")"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Each `StreamingFCLayer_Batch` node now has new attributes which can be examined more closely with netron."
+    "We can see that our sequence of HLS layers has been replaced with `StreamingDataflowPartition`s, each of which point to a different ONNX file. You can open a Netron session for each of them to view their contents. Here, the first and last partitions contain only an `IODMA` node, which was inserted automatically to move data between DRAM and the accelerator. Let's take a closer look at the middle partition, which contains all our layers:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 22,
+   "execution_count": 102,
    "metadata": {},
    "outputs": [
     {
@@ -988,7 +1161,7 @@
      "text": [
       "\n",
       "Stopping http://0.0.0.0:8081\n",
-      "Serving '/workspace/finn/tfc_w1_a1_ipgen.onnx' at http://0.0.0.0:8081\n"
+      "Serving '/tmp/finn_dev_maltanar/dataflow_partition2_b6c72_s0/df_model.onnx' at http://0.0.0.0:8081\n"
      ]
     },
     {
@@ -1005,906 +1178,492 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7fe1346f7588>"
+       "<IPython.lib.display.IFrame at 0x7fe2ef5a0e48>"
       ]
      },
-     "execution_count": 22,
+     "execution_count": 102,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "showInNetron(build_dir+\"/tfc_w1_a1_ipgen.onnx\")"
+    "model = ModelWrapper(build_dir + \"/tfc_w1_a1_post_synthesis.onnx\")\n",
+    "sdp_node_middle = getCustomOp(model.graph.node[1])\n",
+    "postsynth_layers = sdp_node_middle.get_nodeattr(\"model\")\n",
+    "\n",
+    "showInNetron(postsynth_layers)"
    ]
   },
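+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a minimal sketch (using only functions already seen above), you can also list the ONNX file behind every partition programmatically:\n",
+    "\n",
+    "```python\n",
+    "# print the model file attached to each StreamingDataflowPartition node\n",
+    "for sdp in model.get_nodes_by_op_type(\"StreamingDataflowPartition\"):\n",
+    "    print(getCustomOp(sdp).get_nodeattr(\"model\"))\n",
+    "```"
+   ]
+  },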
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "There are two additional attributes: \n",
-    "* `code_gen_dir_ipgen` which contains the directory path where all the files generated by the ipgen transformations are stored\n",
-    "* `ipgen_path` which contains the path to the project directory in which the generated IP block is stored\n",
-    "\n",
-    "We can further investigate which files are produced by taking a look in this directory. For example for the first StreamingFCLayer_Batch node."
+    "We can see that `StreamingFIFO` and `StreamingDataWidthConverter` instances have been automatically inserted into the graph prior to hardware build. Transformations like `ZynqBuild` use the `metadata_props` of the model to put in additional metadata information relevant to the results of the transformation. Let's examine the metadata for the current graph containing all layers:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 23,
+   "execution_count": 103,
    "metadata": {},
    "outputs": [
     {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "StreamingFCLayer_Batch_0_memstream.v  thresh.h\r\n",
-      "hls_syn_StreamingFCLayer_Batch_0.tcl  top_StreamingFCLayer_Batch_0.cpp\r\n",
-      "ipgen.sh\t\t\t      vivado_hls.log\r\n",
-      "memblock_0.dat\t\t\t      weights.npy\r\n",
-      "project_StreamingFCLayer_Batch_0\r\n"
-     ]
+     "data": {
+      "text/plain": [
+       "[key: \"pynq_driver_dir\"\n",
+       "value: \"/tmp/finn_dev_maltanar/pynq_driver_kl300vbh\"\n",
+       ", key: \"vivado_stitch_proj\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_stitch_proj_yy5ixo91\"\n",
+       ", key: \"clk_ns\"\n",
+       "value: \"10\"\n",
+       ", key: \"wrapper_filename\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_stitch_proj_yy5ixo91/finn_vivado_stitch_proj.srcs/sources_1/bd/StreamingDataflowPartition_1/hdl/StreamingDataflowPartition_1_wrapper.v\"\n",
+       ", key: \"vivado_stitch_vlnv\"\n",
+       "value: \"xilinx_finn:finn:StreamingDataflowPartition_1:1.0\"\n",
+       ", key: \"vivado_stitch_ifnames\"\n",
+       "value: \"{\\'clk\\': [\\'ap_clk\\'], \\'rst\\': [\\'ap_rst_n\\'], \\'s_axis\\': [\\'s_axis_0\\'], \\'m_axis\\': [\\'m_axis_0\\'], \\'aximm\\': [], \\'axilite\\': []}\"\n",
+       ", key: \"platform\"\n",
+       "value: \"zynq-iodma\"\n",
+       "]"
+      ]
+     },
+     "execution_count": 103,
+     "metadata": {},
+     "output_type": "execute_result"
     }
    ],
    "source": [
-    "fc0w = getCustomOp(model.graph.node[1])\n",
-    "code_gen_dir = fc0w.get_nodeattr(\"code_gen_dir_ipgen\")\n",
-    "!ls {code_gen_dir}"
+    "model = ModelWrapper(postsynth_layers)\n",
+    "model.model.metadata_props"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here we see that a Vivado project was built to create what we call the `stitched IP`, where all the IP blocks implementing various layers will be stitched together. You can view this stitched block design in Vivado, or [here](StreamingDataflowPartition_1.pdf) as an exported PDF."
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Directory *project_StreamingFCLayer_Batch_0* contains the project created by Vivado HLS into which the IP Block is exported, along with other files generated by Vivado HLS. If we compare it to the above visualization of the network with netron, this is exactly the name of the folder stored in the node attribute `ipgen_path`. The .cpp code that is passed to Vivado HLS can be found in the file *top_StreamingFCLayer_Batch_0.cpp*. The file *thresh.h* belongs to that as well, it contains the value for the thresholds. The weights are stored as .npy file and as .dat file (*memblock_0.dat*). *vivado_hls.log* is the log file from Vivado HLS. Besides these files, the folder contains *ipgen.sh* and *hls_syn_StreamingFCLayer_Batch_0.tcl* and because we use the StreamingFCLayer in \"decoupled\" mode a verilog wrapper (*StreamingFCLayer_Batch_0_memstream.v*) is produced, for more details on \"decoupled\" and \"const\" mode please see on the [FINN readthedocs website](https://finn.readthedocs.io/) under Internals. \n",
-    "\n",
-    "In the following we take a closer look at the two generated scripts. We start with *ipgen.sh*."
+    "Moving back to the top-level model, recall that `ZynqBuild` will create a Vivado project and synthesize it, so it will be creating metadata entries related to the paths and files that were created:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 24,
+   "execution_count": 97,
    "metadata": {},
    "outputs": [
     {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "#!/bin/bash \r\n",
-      "cd /tmp/finn_dev_jakobap/code_gen_ipgen_StreamingFCLayer_Batch_0_edb__5oc\r\n",
-      "vivado_hls /tmp/finn_dev_jakobap/code_gen_ipgen_StreamingFCLayer_Batch_0_edb__5oc/hls_syn_StreamingFCLayer_Batch_0.tcl\r\n",
-      "cd /workspace/finn\r\n"
-     ]
+     "data": {
+      "text/plain": [
+       "[key: \"pynq_driver_dir\"\n",
+       "value: \"/tmp/finn_dev_maltanar/pynq_driver_kl300vbh\"\n",
+       ", key: \"vivado_pynq_proj\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_zynq_proj_kdf60v6f\"\n",
+       ", key: \"bitfile\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_zynq_proj_kdf60v6f/resizer.bit\"\n",
+       ", key: \"hw_handoff\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_zynq_proj_kdf60v6f/resizer.hwh\"\n",
+       ", key: \"vivado_synth_rpt\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_zynq_proj_kdf60v6f/synth_report.xml\"\n",
+       ", key: \"platform\"\n",
+       "value: \"zynq-iodma\"\n",
+       "]"
+      ]
+     },
+     "execution_count": 97,
+     "metadata": {},
+     "output_type": "execute_result"
     }
    ],
    "source": [
-    "shell_script = code_gen_dir + \"/ipgen.sh\"\n",
-    "!cat {shell_script}"
+    "model = ModelWrapper(build_dir + \"/tfc_w1_a1_post_synthesis.onnx\")\n",
+    "model.model.metadata_props"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "The script consists only of two framing `cd` commands and a command to pass the tcl script to *vivado_hls*. The directory has to be changed to create the files in the correct folder and will then be changed back to the original directory. \n",
-    "\n",
-    "Below is the tcl script which is passed to *vivado_hls*."
+    "Here, we can see the directories that were created for the PYNQ driver (`pynq_driver_dir`) and the Vivado synthesis project (`vivado_pynq_proj`), as well as the locations of the bitfile, hardware handoff file and synthesis report."
    ]
   },
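+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a minimal sketch, the individual entries can also be read out programmatically with `get_metadata_prop`, e.g. to fetch the two files needed on the PYNQ board (the variable names here are just for illustration):\n",
+    "\n",
+    "```python\n",
+    "# bitfile and hardware handoff file produced by ZynqBuild\n",
+    "bitfile = model.get_metadata_prop(\"bitfile\")\n",
+    "hwh_file = model.get_metadata_prop(\"hw_handoff\")\n",
+    "```"
+   ]
+  },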
   {
    "cell_type": "code",
-   "execution_count": 25,
+   "execution_count": 98,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "\r\n",
-      "set config_proj_name project_StreamingFCLayer_Batch_0\r\n",
-      "puts \"HLS project: $config_proj_name\"\r\n",
-      "set config_hwsrcdir \"/tmp/finn_dev_jakobap/code_gen_ipgen_StreamingFCLayer_Batch_0_edb__5oc\"\r\n",
-      "puts \"HW source dir: $config_hwsrcdir\"\r\n",
-      "set config_proj_part \"xc7z020clg400-1\"\r\n",
-      "\r\n",
-      "set config_bnnlibdir \"/workspace/finn-hlslib\"\r\n",
-      "\r\n",
-      "set config_toplevelfxn \"StreamingFCLayer_Batch_0\"\r\n",
-      "set config_clkperiod 10\r\n",
-      "\r\n",
-      "open_project $config_proj_name\r\n",
-      "add_files $config_hwsrcdir/top_StreamingFCLayer_Batch_0.cpp -cflags \"-std=c++0x -I$config_bnnlibdir\"\r\n",
-      "\r\n",
-      "set_top $config_toplevelfxn\r\n",
-      "open_solution sol1\r\n",
-      "set_part $config_proj_part\r\n",
-      "\r\n",
-      "config_interface -m_axi_addr64\r\n",
-      "config_rtl -auto_prefix\r\n",
-      "\r\n",
-      "\r\n",
-      "create_clock -period $config_clkperiod -name default\r\n",
-      "csynth_design\r\n",
-      "export_design -format ip_catalog\r\n",
-      "exit 0\r\n"
+      "NA\t\t\t      finn_zynq_link.runs  resizer.bit\t     vivado.jou\r\n",
+      "finn_zynq_link.cache\t      finn_zynq_link.srcs  resizer.hwh\t     vivado.log\r\n",
+      "finn_zynq_link.hw\t      finn_zynq_link.xpr   synth_project.sh\r\n",
+      "finn_zynq_link.ip_user_files  ip_config.tcl\t   synth_report.xml\r\n"
      ]
     }
    ],
    "source": [
-    "tcl_script = code_gen_dir + \"/hls_syn_StreamingFCLayer_Batch_0.tcl\"\n",
-    "!cat {tcl_script}"
+    "! ls {model.get_metadata_prop(\"vivado_pynq_proj\")}"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "In the first part of the script the project is configured. For example the FPGA part and the clock are set. Then the project is opened and the files are added. The toplevel function is set and after creating a clock, the design is first synthesized with `csynth` and then exported as an IP block.\n",
-    "\n",
-    "Now that all IP blocks are in place, they can be stitched together to create an IP design that matches the ONNX model. This is covered in the next section."
+    "Feel free to examine the generated Vivado project to get a feel for how the system-level integration is performed for the  FINN-generated \"stitched IP\", which appears as `StreamingDataflowPartition_1` in the top-level block design -- you can see it as a block diagram exported to PDF [here](top.pdf).\n"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### IP Stitching <a id='ip_stitching'></a>\n",
+    "## 4.  PYNQ deployment <a id='hw_test'></a>\n",
+    "\n",
+    "* [Deployment and Remote Execution](#deploy)\n",
+    "* [Validation on PYNQ Board](#validation)\n",
+    "* [Throughput Test on PYNQ Board](#throughput)\n",
     "\n",
-    "We now have IP blocks for each of our layers, and will stitch them together into a larger IP that implements the whole network using the `CreateStitchedIP` transformation. Bear in mind that this transformation can only be applied on a graph that only contains HLS nodes that already have been through the `HLSSynthIP` transformation, which is the last step we performed. Prior to calling IP stitching, we'll also use the `ReplaceVerilogRelPaths` transformation to convert any relative `$readmemh` paths in the generated IP blocks to absolute ones, which prevents errors later on. **This step invokes Vivado and may take a few minutes to run.**"
+    "\n",
+    "We are almost done preparing our hardware design. We'll now put it in a form suitable for use as a PYNQ overlay, synthesize and deploy it."
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 26,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [],
    "source": [
-    "from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP\n",
-    "from finn.transformation.fpgadataflow.replace_verilog_relpaths import ReplaceVerilogRelPaths\n",
-    "model = ModelWrapper(build_dir+\"/tfc_w1_a1_ipgen.onnx\")\n",
-    "model = model.transform(ReplaceVerilogRelPaths())\n",
-    "model = model.transform(CreateStitchedIP(fpga_part))"
+    "### Deployment and Remote Execution <a id='deploy'></a>\n",
+    "\n",
+    "We'll now use the `DeployToPYNQ` transformation to create a deployment folder with the bitfile and driver file(s), and copy that to the PYNQ board. You can change the default IP address, username, password and target folder for the PYNQ below."
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": 47,
    "metadata": {},
+   "outputs": [],
    "source": [
-    "If you examine the nodes themselves on the transformed model you won't see a difference, because the IP stitching adds model-level metadata to the graph. This can be accessed using the `.model.metadata_props`, the `get_metadata_prop` function in `ModelWrapper`, or by clicking on the global input/output tensors in Netron."
+    "from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ\n",
+    "ip = \"192.168.2.99\"\n",
+    "port = \"22\"\n",
+    "username = \"xilinx\"\n",
+    "password = \"xilinx\"\n",
+    "target_dir = \"/home/xilinx/finn_tfc_end2end_example\"\n",
+    "model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))\n",
+    "model.save(build_dir + \"/tfc_w1_a1_pynq_deploy.onnx\")"
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 27,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[key: \"vivado_stitch_proj\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl\"\n",
-       ", key: \"vivado_stitch_vlnv\"\n",
-       "value: \"xilinx_finn:finn:finn_design:1.0\"\n",
-       ", key: \"wrapper_filename\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl/finn_vivado_stitch_proj.srcs/sources_1/bd/finn_design/hdl/finn_design_wrapper.v\"\n",
-       "]"
-      ]
-     },
-     "execution_count": 27,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
    "source": [
-    "model.model.metadata_props"
+    "Let's verify that the remote access credentials is saved in the model metadata, and that the deployment folder has been successfully copied to the board:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 28,
+   "execution_count": 48,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "'/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl'"
+       "[key: \"pynq_driver_dir\"\n",
+       "value: \"/tmp/finn_dev_maltanar/pynq_driver_kl300vbh\"\n",
+       ", key: \"vivado_pynq_proj\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_zynq_proj_kdf60v6f\"\n",
+       ", key: \"bitfile\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_zynq_proj_kdf60v6f/resizer.bit\"\n",
+       ", key: \"hw_handoff\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_zynq_proj_kdf60v6f/resizer.hwh\"\n",
+       ", key: \"vivado_synth_rpt\"\n",
+       "value: \"/tmp/finn_dev_maltanar/vivado_zynq_proj_kdf60v6f/synth_report.xml\"\n",
+       ", key: \"platform\"\n",
+       "value: \"zynq-iodma\"\n",
+       ", key: \"pynq_ip\"\n",
+       "value: \"192.168.2.99\"\n",
+       ", key: \"pynq_port\"\n",
+       "value: \"22\"\n",
+       ", key: \"pynq_username\"\n",
+       "value: \"xilinx\"\n",
+       ", key: \"pynq_password\"\n",
+       "value: \"xilinx\"\n",
+       ", key: \"pynq_target_dir\"\n",
+       "value: \"/home/xilinx/finn_tfc_end2end_example\"\n",
+       ", key: \"pynq_deployment_dir\"\n",
+       "value: \"/tmp/finn_dev_maltanar/pynq_deployment_3wrnn2sp\"\n",
+       ", key: \"pynq_deploy_dir\"\n",
+       "value: \"/tmp/finn_dev_maltanar/pynq_deployment_3wrnn2sp\"\n",
+       ", key: \"exec_mode\"\n",
+       "value: \"remote_pynq\"\n",
+       "]"
       ]
      },
-     "execution_count": 28,
+     "execution_count": 48,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "model.get_metadata_prop(\"vivado_stitch_proj\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "If you navigate to the folder above (remember the /tmp/finn_xxx folder is mounted on the host as well as inside Docker) you can open the Vivado project (.xpr) file there using Vivado, and view the following stitched IP block design:"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "![](stitched_ip.png)"
+    "model.model.metadata_props"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 29,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "model.save(build_dir+\"/tfc_w1_a1_ipstitch.onnx\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "At this point, one could take the generated stitched IP and integrate it into your own project using Vivado IP Integrator if desired. Here, we will continue the tutorial by assuming that we want to do a stand-alone deployment for this accelerator for a PYNQ board."
-   ]
-  },
-  {
-   "cell_type": "markdown",
+   "execution_count": 106,
    "metadata": {},
-   "source": [
-    "## 4.  PYNQ hardware generation and deployment <a id='hw_test'></a>\n",
-    "\n",
-    "* [Inserting the IP into a PYNQ Overlay Shell](#pynq_shell)\n",
-    "* [Synthesis, Place and Route](#synth_pl_ro)\n",
-    "* [Driver Generation](#driver_gen)\n",
-    "* [Deployment and Remote Execution](#deploy)\n",
-    "* [Throughput Test on PYNQ Board](#throughput)\n",
-    "\n",
-    "\n",
-    "We are almost done preparing our hardware design. We'll now put it in a form suitable for use as a PYNQ overlay, synthesize and deploy it."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Inserting the IP into a PYNQ Overlay Shell <a id='pynq_shell'></a>\n",
-    "\n",
-    "To deploy our accelerator on a PYNQ platform, it needs to be put inside an appropriate *shell* that bridges it with the interfaces that the underlying system exposes. FINN makes it easy to create a PYNQ-compatible overlay by inserting the stitched IP into an appropriate PYNQ shell with the `MakePYNQProject` transformation, and view the created PYNQ shell project directory using the `metadata_props`. **This invokes Vivado and may take a few minutes to run.**"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {
-    "scrolled": true
-   },
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "[key: \"vivado_stitch_proj\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl\"\n",
-       ", key: \"vivado_stitch_vlnv\"\n",
-       "value: \"xilinx_finn:finn:finn_design:1.0\"\n",
-       ", key: \"wrapper_filename\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl/finn_vivado_stitch_proj.srcs/sources_1/bd/finn_design/hdl/finn_design_wrapper.v\"\n",
-       ", key: \"vivado_pynq_proj\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_pynq_proj_ljn53hfs\"\n",
-       ", key: \"vivado_synth_rpt\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_pynq_proj_ljn53hfs/synth_report.xml\"\n",
-       "]"
+       "'/home/xilinx/finn_tfc_end2end_example/pynq_deployment_3wrnn2sp'"
       ]
      },
-     "execution_count": 30,
+     "execution_count": 106,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject\n",
-    "model = ModelWrapper(build_dir+\"/tfc_w1_a1_ipstitch.onnx\")\n",
-    "model = model.transform(MakePYNQProject(pynq_board))\n",
-    "model.model.metadata_props"
+    "target_dir_pynq = target_dir + \"/\" + model.get_metadata_prop(\"pynq_deployment_dir\").split(\"/\")[-1]\n",
+    "target_dir_pynq"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 31,
+   "execution_count": 107,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "ip_config.tcl\t resizer.cache\tresizer.ip_user_files  resizer.xpr\r\n",
-      "make_project.sh  resizer.hw\tresizer.srcs\t       synth_project.sh\r\n"
+      "total 4236\r\n",
+      "-rw-r--r-- 1 xilinx xilinx    8490 Sep 21 11:06 driver.py\r\n",
+      "drwxr-xr-x 4 xilinx xilinx    4096 Sep 21 11:06 finn\r\n",
+      "-rw-r--r-- 1 xilinx xilinx    3264 Sep 21 12:05 input.npy\r\n",
+      "-rw-r--r-- 1 root   root       205 Sep 21 12:34 nw_metrics.txt\r\n",
+      "-rw-r--r-- 1 root   root        84 Sep 21 12:06 output.npy\r\n",
+      "drwxrwxr-x 2 xilinx xilinx    4096 Sep 21 11:34 __pycache__\r\n",
+      "-rw-r--r-- 1 xilinx xilinx 4045671 Sep 21 11:06 resizer.bit\r\n",
+      "-rw-r--r-- 1 xilinx xilinx  246211 Sep 21 11:06 resizer.hwh\r\n",
+      "-rw-r--r-- 1 root   root        32 Sep 21 12:34 sds_trace_data.dat\r\n",
+      "-rw-r--r-- 1 xilinx xilinx    1727 Sep 21 11:06 validate.py\r\n"
      ]
     }
    ],
    "source": [
-    "! ls {model.get_metadata_prop(\"vivado_pynq_proj\")}"
+    "! sshpass -p {password} ssh {username}@{ip} -p {port} 'ls -l {target_dir_pynq}'"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "If we open the created Vivado project (.xpr) under the `vivado_pynq_proj` directory above, we can see the system-level block design as below, with the FINN-generated part of the design highlighted. Various other components, such as the DMA engine and data width converters, have also been instantiated.\n",
-    "![](pynq_shell_project.png)"
+    "We only have two more steps to be able to remotely execute the deployed bitfile with some test data from the MNIST dataset. Let's load up some test data that comes bundled with FINN."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "model.save(build_dir + \"/tfc_w1_a1_pynq_project.onnx\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
+   "execution_count": 53,
    "metadata": {},
-   "source": [
-    "### Synthesis, Place and Route <a id='synth_pl_ro'></a>"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We are now ready for the final hardware generation step, which is synthesis, place and route to generate an FPGA bitfile. This can be done by either running the `synth_project.sh` script in the generated Vivado PYNQ project directory inside Docker, or by executing the `SynthPYNQProject` transformation. **This step involves launching Vivado for synthesis and may take a few hours.**"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {
-    "scrolled": true
-   },
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "[key: \"vivado_stitch_proj\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl\"\n",
-       ", key: \"vivado_stitch_vlnv\"\n",
-       "value: \"xilinx_finn:finn:finn_design:1.0\"\n",
-       ", key: \"wrapper_filename\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl/finn_vivado_stitch_proj.srcs/sources_1/bd/finn_design/hdl/finn_design_wrapper.v\"\n",
-       ", key: \"vivado_pynq_proj\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_pynq_proj_ljn53hfs\"\n",
-       ", key: \"vivado_synth_rpt\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_pynq_proj_ljn53hfs/synth_report.xml\"\n",
-       ", key: \"vivado_pynq_bitfile\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_pynq_proj_ljn53hfs/resizer.bit\"\n",
-       "]"
+       "<matplotlib.image.AxesImage at 0x7fe2dd62bf98>"
       ]
      },
-     "execution_count": 33,
+     "execution_count": 53,
      "metadata": {},
      "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAARYElEQVR4nO3dfYyVZXrH8d/FoDAw8iYRCaisG/5QqmUbgk1KyOKmxlUMbKJm/aPauAmarMmqTVqz/UOSaqJVa/pH3YStL9CsmiWoq0a7a82mWo1GNFQQW1CULGR4E5H3t+HqH/NgZ3We6549z3nOc9z7+0kmM3Ouec65OTM/zsv13Pdt7i4Af/xGNT0AAJ1B2IFMEHYgE4QdyARhBzIxupM3Zma89Z+ZUaPKH09OnTpV23VXvf6enp6wPjAw0PJ1183dbbjLK4XdzK6U9M+SeiT9q7vfV+X6cmU27O/mS6k/6ip/eKNHx38CqcCk6r29vaW1Q4cOhcem9PX1hfUDBw6U1lIt50mTJoX1zz77LKx3o5afxptZj6R/kfR9SRdLusHMLm7XwAC0V5XX7PMlfeTuW9z9uKSnJS1pz7AAtFuVsM+Q9Lsh328rLvs9ZrbMzNaa2doKtwWgotrfoHP3FZJWSLxBBzSpyiP7dknnDfl+ZnEZgC5UJezvSJptZt8yszMl/VDS8+0ZFoB2a/lpvLufNLPbJP1ag623x9z9g7aNLCPjx48P6wcPHmz5useMGRPWjx07FtZTbcFx48aF9ai9lmoppqSOj9prqT76vn37WhlSV6v0mt3dX5L0UpvGAqBGnC4LZIKwA5kg7EAmCDuQCcIOZIKwA5mwTq4um+vpsqled6qXffTo0bA+duzYlo9Nia676vWfffbZYb3qNNLofp06dWp47O7du8N6amrwyZMnw3qdyuaz88gOZIKwA5kg7EAmCDuQCcIOZIKwA5mg9fYNkGrNVfkd1nnddUtNDa6yem1q6m5qanCTS03TegMyR9iBTBB2IBOEHcgEYQcyQdiBTBB2IBP02TvgrLPOCuvRbqOSNHHixLB+4sSJ0lpqN9LUFNbPP/88rC9YsCCs33rrraW1VC/6jjvuCOtbt24N601OM20SfXYgc4QdyARhBzJB2IFMEHYgE4QdyARhBzJBn/0b4JFHHgnrUS871Wuuuox1b29vWI+ktk2+5JJLwvqmTZvC+vHjx0trZ5xxRnhsdO6ClP53HzlyJKzXqazPXmnLZjP7VNIBSQOSTrr7vCrXB6A+lcJeWOTue9pwPQBqxGt2IBNVw+6SfmNm75rZsuF+wMyWmdlaM1tb8bYAVFD1afwCd99uZudIesXM/sfdXxv6A+6+QtIKiTfogCZVemR39+3F512SnpU0vx2DAtB+LYfdzMab2Vmnv5Z0haQN7RoYgPaq8jR+mqRniz7taElPuvu/t2VUf2RSWzYvWrQorF922WVhPeqVHzx4MDw21W/u6+sL66nzNKI566m11x999NGWr1uS7rzzztLaW2+9FR5b93bSTWg57O6+RdKftnEsAGpE6w3IBGEHMkHYgUwQdiAThB3IBFNcu0Bqqubs2bPD+v79+0trEyZMCI+NpoFK6SmwVbZ8TrX9UlJLcO/du7e0tnTp0vDYdevWhfVUSzLV8qwTS0kDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJdiw42TFRT7fOfnBK6thU/ZZbbgnrq1atCuszZ85s+bZTffZ77rknrK9evTqsn3nmmaW1K664Ijz2wQcfDOuprbCj2168eHF47LZt28L6nj3fvDVWeWQHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiATHZ/Pnup3Rzo51naqOvd54cKFYf2iiy4qrY0bNy48dvTo+FSLNWvWhPUtW7aE9SpSyz3PmTMnrKfu90jq75T57AC6FmEHMkHYgUwQdiAThB3IBGEHMkHYgUx0vM8+alT5/y9V54XXqcpc+lOnTlW67eg+S9VPnjwZHjt+/PiwfujQobCe2o46+p2l5tJfffXVYf3pp58O61X67Kk17VP3a5Na7rOb2WNmtsvMNgy5bIqZvWJmm4vPk9s5WADtN5Kn8U9IuvIrl90l6VV3ny3p1eJ7AF0sGXZ3f03SV/fRWSJpZfH1SklL2zssAO3W6hp009y9v/h6h6RpZT9oZsskLWvxdgC0SeUFJ93dow0b3X2FpBUSGzsCTWq19bbTzKZLUvF5V/uGBKAOrYb9eUk3FV/fJOlX7RkOgLok++xm9pSk70qaKmmnpLslPSfpl5LOl7RV0vXuXr4Z9v9fV21P46uuG1+1Hkn1ZFN7qEf7r1fV29sb1o8cORLWU+cAVDnH4MILLwzrH3/8ccvXnRpXak36lMOHD1c6voqyPnvyNbu731BS+l6lEQHoKE6XBTJB2IFMEHYgE4QdyARhBzLBls2FVAtyYGAgrEd6enrCetVlh6M2UarFlJrCmpK6/mjb5KgmSYsWLWppTKdFv9MTJ06Ex6amuFb5e2gKj+xAJgg7kAnCDmSCsAOZIOxAJgg7kAnCDmSiq/rsdW7nXHU55yrqvu0DBw6U1lL94lSvO3V8qk8fLRedWsb6uuuuC+tHjx4N62PHji2tpfrsqd9Zk1syt4pHdiAThB3IBGEHMkHYgUwQdiAThB3IBGEHMtHxPns0t7ube+XRksmp5ZRT6txW+dJLLw2PnTNnTlhPLSX93HPPhfVI1AeXpIULF4b1Klt4p5ahjs5dkKovwd0EHtmBTBB2IBOEHcgEYQcyQdiBTBB2IBOEHchEx/vs0Zz1OvvoqbnyqXndUU949Oj4bly6dGlYTx2/ZMmSsD5mzJjS2ty5c8NjJ02aFNZTvezXX3+95eNnz54dHptamz3V616/fn1p7fLLLw+Pje5TqTv76CnJR3Yze8zMdpnZhiGXLTez7Wa2rvi4qt5hAqhqJE/jn5B05TCXP+zuc4uPl9o7LADtlgy7u78maW8HxgKgRlXeoLvNzN4vnuZPLvshM1tmZmvNbG2F2wJQUath/5mkb0uaK6lf0kNlP+juK9x9nrvPa/G2ALRBS2F3953uPuDupyT9XNL89g4LQLu1FHYzmz7k2x9I2lD2swC6g6X6qGb2lKTvSpoqaaeku4vv50pySZ9KusXd+5M3ZhbeWKrfnJr3HZk1a1ZYv+aaa8L64sWLS2upedepedupudPR/utSvIZ5X19feGxK1Xnd0e/0iy++CI+dOHFiWE/ZvHlzaW3VqlXhsQ89VPrKVFJ399ndfdiTSpIn1bj7DcNc/GjlEQHoKE6XBTJB2IFMEHYgE4QdyARhBzKRbL219cbMPFp2uc4prnfffXdYX758eVjfs2dPaW3q1KmtDOlLqa2H9+6NpyZE9QsuuCA8NtUWTG3ZnHLs2LHSWmoaaervIdWKjaYtp7Zcfvnll8P6zTffHNab3NK5rPXGIzuQCcIOZIKwA5kg7EAmCDuQCcIOZIKwA5noeJ89qlfZmjg11TLV96yy7fKuXbvC+tatW8P6Aw88ENZXr1
4d1ufNK18E6OGHHw6PTW3ZPHly6YpjkqRt27aF9eh3+sQTT4THfvLJJ2H92muvDevR1OOq02tffPHFsJ6aMl0n+uxA5gg7kAnCDmSCsAOZIOxAJgg7kAnCDmSio332UaNGeTQ/+vjx4+Hx55xzTmlt9+7d4bGpPntq7nTUL05tB71p06awPmXKlLCeWrY4Wu75/PPPD49NzWdPLe+9b9++sH7jjTeW1l544YXw2JTUOgLRctGLFi0Kj02tMZC6X1LLf9eJPjuQOcIOZIKwA5kg7EAmCDuQCcIOZIKwA5noqvnsVaT6nitXrgzr119/fcvXf/jw4fDYcePGhfXUtsipef4DAwOltdS672+++WZYf/LJJ8P6unXrwvobb7xRWkudX5Dq4ad+59F5G/Pnzw+Pffvtt8P6448/HtZT68rXqeU+u5mdZ2a/NbONZvaBmf2kuHyKmb1iZpuLz/EqBwAaNZKn8Scl/Y27XyzpzyX92MwulnSXpFfdfbakV4vvAXSpZNjdvd/d3yu+PiDpQ0kzJC2RdPq58UpJS2saI4A2iF/0fIWZzZL0HUlvS5rm7v1FaYekaSXHLJO0rMIYAbTBiN+NN7M+SWsk3e7u+4fWfPBdvmHffHP3Fe4+z93LV0UEULsRhd3MztBg0H/h7s8UF+80s+lFfbqkeIlVAI1Ktt5scP7mSkl73f32IZc/IOkzd7/PzO6SNMXd/zZxXeGNnXvuueFYduzYEdYj0fa9kjRz5sywfu+995bWZsyYER6b2nI5tXVxtF20JN1///2ltY0bN4bHpqa4prZFTklNW46k2oYnTpwI69HU49Tf/YQJE8J61SnTdSprvY3kNftfSPorSevNbF1x2U8l3Sfpl2b2I0lbJcWNagCNSobd3f9LUtl/kd9r73AA1IXTZYFMEHYgE4QdyARhBzJB2IFMdHSKa09Pj0d93dRU0aj3uX///tKaJPX19YX1VN806vlW6fdK6Z5v6hyBqJed6uEfO3YsrFcV/b5TyzWnpgan/l6q/M5Sqo6tTiwlDWSOsAOZIOxAJgg7kAnCDmSCsAOZIOxAJrpqKenUHOKol55aVrjqvOzp06eX1vr7+0trI9Hb2xvWU1s213ndqWWsDx06FNarzClPGTUqfqyqMqe86fMTqqDPDmSOsAOZIOxAJgg7kAnCDmSCsAOZIOxAJrqqzw6gOvrsQOYIO5AJwg5kgrADmSDsQCYIO5AJwg5kIhl2MzvPzH5rZhvN7AMz+0lx+XIz225m64qPq+ofLoBWJU+qMbPpkqa7+3tmdpakdyUt1eB+7Afd/cER3xgn1QC1KzupZiT7s/dL6i++PmBmH0qa0d7hAajbH/Sa3cxmSfqOpLeLi24zs/fN7DEzm1xyzDIzW2tma6sNFUAVIz433sz6JP2npHvd/RkzmyZpjySX9A8afKp/c+I6eBoP1KzsafyIwm5mZ0h6UdKv3f2fhqnPkvSiu/9J4noIO1CzlifC2ODyoI9K+nBo0Is37k77gaQNVQcJoD4jeTd+gaTXJa2XdHpt3p9KukHSXA0+jf9U0i3Fm3nRdfHIDtSs0tP4diHsQP2Yzw5kjrADmSDsQCYIO5AJwg5kgrADmSDsQCYIO5AJwg5kgrADmSDsQCYIO5AJwg5kgrADmUguONlmeyRtHfL91OKybtStY+vWcUmMrVXtHNsFZYWOzmf/2o2brXX3eY0NINCtY+vWcUmMrVWdGhtP44FMEHYgE02HfUXDtx/p1rF167gkxtaqjoyt0dfsADqn6Ud2AB1C2IFMNBJ2M7vSzP7XzD4ys7uaGEMZM/vUzNYX21A3uj9dsYfeLjPbMOSyKWb2ipltLj4Pu8deQ2Prim28g23GG73vmt7+vOOv2c2sR9ImSX8paZukdyTd4O4bOzqQEmb2qaR57t74CRhmtlDSQUmrTm+tZWb/KGmvu99X/Ec52d3/rkvGtlx/4DbeNY2tbJvxv1aD9107tz9vRROP7PMlfeTuW9z9uKSnJS1pYBxdz91fk7T3KxcvkbSy+HqlBv9YOq5kbF3B3fvd/b3i6wOSTm8z3uh9F4yrI5oI+wxJvxvy/TZ1137vLuk3ZvaumS1rejDDmDZkm60dkqY1OZhhJLfx7qSvbDPeNfddK9ufV8UbdF+3wN3/TNL3Jf24eLralXzwNVg39U5/JunbGtwDsF/SQ00OpthmfI2k2919/9Bak/fdMOPqyP3WRNi3SzpvyPczi8u6grtvLz7vkvSsBl92dJOdp3fQLT7vang8X3L3ne4+4O6nJP1cDd53xTbjayT9wt2fKS5u/L4bblydut+aCPs7kmab2bfM7ExJP5T0fAPj+BozG1+8cSIzGy/pCnXfVtTPS7qp+PomSb9qcCy/p1u28S7bZlwN33eNb3/u7h3/kHSVBt+R/1jS3zcxhpJxXSjpv4uPD5oem6SnNPi07oQG39v4kaSzJb0qabOk/5A0pYvG9m8a3Nr7fQ0Ga3pDY1ugwafo70taV3xc1fR9F4yrI/cbp8sCmeANOiAThB3IBGEHMkHYgUwQdiAThB3IBGEHMvF/rSIwqVQD1iIAAAAASUVORK5CYII=\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
     }
    ],
    "source": [
-    "from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject\n",
-    "model = ModelWrapper(build_dir + \"/tfc_w1_a1_pynq_project.onnx\")\n",
-    "model = model.transform(SynthPYNQProject())\n",
-    "model.model.metadata_props"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "model.save(build_dir + \"/tfc_w1_a1_post_synthesis.onnx\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Driver Generation <a id='driver_gen'></a>\n",
+    "from pkgutil import get_data\n",
+    "import onnx.numpy_helper as nph\n",
+    "import matplotlib.pyplot as plt\n",
     "\n",
-    "Now that we have synthesized a bitfile for our network, we will generate some Python code for PYNQ that will act as the driver for this bitfile, package everything into a deployment folder and copy that to our PYNQ board."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver\n",
-    "model = ModelWrapper(build_dir + \"/tfc_w1_a1_post_synthesis.onnx\")\n",
-    "model = model.transform(MakePYNQDriver())"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The generated driver is placed in a folder that is indicated by the `pynq_driver_dir` top-level metadata. We can examine the generated PYNQ Python driver code as follows:"
+    "raw_i = get_data(\"finn\", \"data/onnx/mnist-conv/test_data_set_0/input_0.pb\")\n",
+    "x = nph.to_array(onnx.load_tensor_from_string(raw_i))\n",
+    "plt.imshow(x.reshape(28,28), cmap='gray')"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 36,
+   "execution_count": 92,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "\r\n",
-      "import argparse\r\n",
-      "\r\n",
-      "from pynq import Overlay\r\n",
-      "import numpy as np\r\n",
-      "from pynq import allocate\r\n",
-      "import time\r\n",
-      "from finn.util.data_packing import (\r\n",
-      "    finnpy_to_packed_bytearray,\r\n",
-      "    packed_bytearray_to_finnpy\r\n",
-      ")\r\n",
-      "from finn.core.datatype import DataType\r\n",
-      "\r\n",
-      "class FINNAccelDriver():\r\n",
-      "    def __init__(self, N, bitfile):\r\n",
-      "        \"\"\"Instantiate the FINN accelerator driver.\r\n",
-      "        Gets batchsize (N) as integer and path to bitfile as string.\"\"\"\r\n",
-      "        self.N = N\r\n",
-      "        # input FINN DataType\r\n",
-      "        self.idt = DataType.BINARY\r\n",
-      "        # output FINN DataType\r\n",
-      "        self.odt = DataType.UINT32\r\n",
-      "        # input and output shapes\r\n",
-      "        self.ishape_normal = (N, 784)\r\n",
-      "        self.oshape_normal = (N, 10)\r\n",
-      "        self.ishape_folded = (N, 16, 49)\r\n",
-      "        self.oshape_folded = (N, 1, 10)\r\n",
-      "        self.ishape_packed = (N, 16, 7)   # datatype np.uint8\r\n",
-      "        self.oshape_packed = (N, 1, 40)  # datatype np.uint8\r\n",
-      "        # load bitfile and set up accelerator\r\n",
-      "        self.ol = Overlay(bitfile)\r\n",
-      "        self.dma = self.ol.axi_dma_0\r\n",
-      "        self.ctrl_regs = self.ol.resize_accel_0\r\n",
-      "        # neuron folding factor of output = iterations per sample\r\n",
-      "        self.itersPerSample = self.oshape_packed[-2]\r\n",
-      "        # AXI lite register offset for number of iterations\r\n",
-      "        # used by TLastMarker to signal end of transmission for AXI CDMA\r\n",
-      "        self.REG_OFFSET_NUM_ITERS = 0x10\r\n",
-      "        # set up TLastMarker with correct num. samples\r\n",
-      "        self.ctrl_regs.write(self.REG_OFFSET_NUM_ITERS, self.N*self.itersPerSample)\r\n",
-      "\r\n",
-      "        # allocate a PYNQ buffer for the packed input and buffer\r\n",
-      "        self.ibuf_packed_device = allocate(shape=self.ishape_packed, dtype=np.uint8)\r\n",
-      "        self.obuf_packed_device = allocate(shape=self.oshape_packed, dtype=np.uint8)\r\n",
-      "\r\n",
-      "    def fold_input(self, ibuf_normal):\r\n",
-      "        \"\"\"Reshapes input in desired shape.\r\n",
-      "        Gets input data (ibuf_normal), checks if data is in expected normal shape.\r\n",
-      "        Returns folded input.\"\"\"\r\n",
-      "        # ensure that shape is as expected\r\n",
-      "        assert ibuf_normal.shape == self.ishape_normal\r\n",
-      "        # convert to folded form\r\n",
-      "        ibuf_folded = ibuf_normal.reshape(self.ishape_folded)\r\n",
-      "        return ibuf_folded\r\n",
-      "\r\n",
-      "    def pack_input(self, ibuf_folded):\r\n",
-      "        \"\"\"Packs folded input and reverses both SIMD dim and endianness.\r\n",
-      "        Gets input data in folded shape and returns packed input data.\"\"\"\r\n",
-      "        ibuf_packed = finnpy_to_packed_bytearray(\r\n",
-      "            ibuf_folded, self.idt, reverse_endian=True, reverse_inner=True\r\n",
-      "        )\r\n",
-      "        return ibuf_packed\r\n",
-      "\r\n",
-      "    def unpack_output(self, obuf_packed):\r\n",
-      "        \"\"\"Unpacks the packed output buffer from accelerator.\r\n",
-      "        Gets packed output and returns output data in folded shape.\"\"\"\r\n",
-      "        obuf_folded = packed_bytearray_to_finnpy(\r\n",
-      "            obuf_packed, self.odt, self.oshape_folded, reverse_endian=True, reverse_inner=True\r\n",
-      "        )\r\n",
-      "        return obuf_folded\r\n",
-      "\r\n",
-      "    def unfold_output(self, obuf_folded):\r\n",
-      "        \"\"\"Unfolds output data to normal shape.\r\n",
-      "        Gets folded output data and returns output data in normal shape.\"\"\"\r\n",
-      "        obuf_normal = obuf_folded.reshape(self.oshape_normal)\r\n",
-      "        return obuf_normal\r\n",
-      "\r\n",
-      "    def copy_input_data_to_device(self, data):\r\n",
-      "        \"\"\"Copies given input data to PYNQ buffer.\"\"\"\r\n",
-      "        np.copyto(self.ibuf_packed_device, data)\r\n",
-      "\r\n",
-      "    def execute(self):\r\n",
-      "        \"\"\"Executes accelerator by setting up the DMA and\r\n",
-      "        waiting until all transfers complete. Uses only member variables and\r\n",
-      "        returns nothing.\"\"\"\r\n",
-      "        dma = self.dma\r\n",
-      "        dma.sendchannel.transfer(self.ibuf_packed_device)\r\n",
-      "        dma.recvchannel.transfer(self.obuf_packed_device)\r\n",
-      "        dma.sendchannel.wait()\r\n",
-      "        dma.recvchannel.wait()\r\n",
-      "\r\n",
-      "\r\n",
-      "if __name__ == \"__main__\":\r\n",
-      "    parser = argparse.ArgumentParser(description='Set exec mode, batchsize N, bitfile name, inputfile name and outputfile name')\r\n",
-      "    parser.add_argument('--exec_mode', help='Please select functional verification (\"execute\") or throughput test (\"throughput_test\")', default=\"execute\")\r\n",
-      "    parser.add_argument('--batchsize', help='number of samples for inference', type=int, default=1)\r\n",
-      "    parser.add_argument('--bitfile', help='name of bitfile (i.e. \"resizer.bit\")', default=\"resizer.bit\")\r\n",
-      "    parser.add_argument('--inputfile', help='name of input npy file (i.e. \"input.npy\")', default=\"input.npy\")\r\n",
-      "    parser.add_argument('--outputfile', help='name of output npy file (i.e. \"output.npy\")', default=\"output.npy\")\r\n",
-      "    # parse arguments\r\n",
-      "    args = parser.parse_args()\r\n",
-      "    exec_mode = args.exec_mode\r\n",
-      "    N = args.batchsize\r\n",
-      "    bitfile = args.bitfile\r\n",
-      "    inputfile = args.inputfile\r\n",
-      "    outputfile = args.outputfile\r\n",
-      "\r\n",
-      "    # instantiate FINN accelerator driver and pass batchsize and bitfile\r\n",
-      "    finnDriver = FINNAccelDriver(N, bitfile)\r\n",
-      "\r\n",
-      "    # for the remote execution the data from the input npy file has to be loaded,\r\n",
-      "    # packed and copied to the PYNQ buffer\r\n",
-      "    if exec_mode == \"execute\":\r\n",
-      "        # load desired input .npy file\r\n",
-      "        ibuf_normal = np.load(inputfile)\r\n",
-      "        ibuf_folded = finnDriver.fold_input(ibuf_normal)\r\n",
-      "        ibuf_packed = finnDriver.pack_input(ibuf_folded)\r\n",
-      "        finnDriver.copy_input_data_to_device(ibuf_packed)\r\n",
-      "    elif exec_mode != \"throughput_test\":\r\n",
-      "        raise Exception(\"Exec mode has to be set to remote_pynq or throughput_test\")\r\n",
-      "\r\n",
-      "    # for the throughput test the runtime of the network has to be measured\r\n",
-      "    if exec_mode == \"throughput_test\":\r\n",
-      "        # measure runtime of network\r\n",
-      "        start = time.time()\r\n",
-      "        # dictionary for results of throughput test\r\n",
-      "        res={}\r\n",
-      "\r\n",
-      "    # execute accelerator\r\n",
-      "    finnDriver.execute()\r\n",
-      "\r\n",
-      "    # measure run time and fill dictionary with results of the throughput test\r\n",
-      "    if exec_mode == \"throughput_test\":\r\n",
-      "        end = time.time()\r\n",
-      "        runtime = end - start\r\n",
-      "        res[\"runtime[ms]\"] = runtime*1000\r\n",
-      "        res[\"throughput[images/s]\"] = N / runtime\r\n",
-      "        res[\"DRAM_in_bandwidth[Mb/s]\"] = np.prod(finnDriver.ishape_packed)*0.000001 / runtime\r\n",
-      "        res[\"DRAM_out_bandwidth[Mb/s]\"] = np.prod(finnDriver.oshape_packed)*0.000001 / runtime\r\n",
-      "        file = open(\"nw_metrics.txt\", \"w\")\r\n",
-      "        file.write(str(res))\r\n",
-      "        file.close()\r\n",
-      "\r\n",
-      "    # if execution is selected unpack, unfold and save output to output npy file\r\n",
-      "    else:\r\n",
-      "        obuf_folded = finnDriver.unpack_output(finnDriver.obuf_packed_device)\r\n",
-      "        obuf_normal = finnDriver.unfold_output(obuf_folded)\r\n",
-      "        np.save(outputfile, obuf_normal)\r\n",
-      "\r\n",
-      "\r\n"
+      "Expected network input shape is [1, 784]\n"
      ]
     }
    ],
    "source": [
-    "driver_dir = model.get_metadata_prop(\"pynq_driver_dir\")\n",
-    "! cat {driver_dir}/driver.py"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We can see that in the generated driver a class is implemented which implements the FINN accelerator. The constructor gets the batchsize (N) as integer and the bitfile as string. It also contains the expected input/output shapes, and takes care of the instantiation of the accelerator by loading the bitfile and setting up dma and buffer. Several member functions take care of the data folding and packing. The function `copy_input_data_to_device` copies the input data into the PYNQ buffer and `execute` sets up the dma channels and waits until the transfer is completed. This class is used in the main function. But first the arguments are parsed, which are passed to the script. The driver can be used in two modes: \"execute\" and \"throughput_test\". By default all arguments are set to \"execute\" mode. In this mode the batch size is 1, and the passed files are set to the names used by the FINN transformations.\n",
-    "\n",
-    "In the \"execute\" mode works as follows:\n",
-    "1. the data is loaded from the \"inputfile\"\n",
-    "2. the data is folded using `fold_input`\n",
-    "3. the data is packed using `pack_input`\n",
-    "4. the data is copied to the device using `copy_input_data_to_device`\n",
-    "5. FINNAccelDriver is executed using `execute`\n",
-    "6. the data is unpacked using `unpack_output`\n",
-    "7. the data is unfolded using `unfold_output`\n",
-    "8. the data is stored in the \"outputfile\"\n",
-    "\n",
-    "If \"throughput_test\" is selected as `exec_mode`, no actual data needs to be loaded. The batchsize N should be set to a high value (i.e. 1000) and a time measurement is implemented in python. An empty dictionary (`res`) is created and after running the accelerator with the measured runtime it is filled with the metrics and saved in a .txt file.\n",
-    "\n",
-    "You can build your own applications around the accelerator by modifying the driver, or use the remote execution capabilities that FINN provides just to check if it is working, which will be our next step."
+    "model = ModelWrapper(build_dir + \"/tfc_w1_a1_pynq_deploy.onnx\")\n",
+    "iname = model.graph.input[0].name\n",
+    "oname = parent_model.graph.output[0].name\n",
+    "ishape = model.get_tensor_shape(iname)\n",
+    "print(\"Expected network input shape is \" + str(ishape))"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Deployment and Remote Execution <a id='deploy'></a>\n",
-    "\n",
-    "We'll now use the `DeployToPYNQ` transformation to create a deployment folder with the bitfile and driver file(s), and copy that to the PYNQ board. You can change the default IP address, username, password and target folder for the PYNQ below."
+    "Finally, we can call `execute_onnx` on the graph, which will internally call remote execution with the bitfile, grab the results and return a numpy array. You may recall that one \"reshape\" node was left out of the StreamingDataflowPartition. We'll do that manually with a numpy function call when passing in the input, but everything else in the network ended up inside the StreamingDataflowPartition so that's all we need to do."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 37,
+   "execution_count": 95,
    "metadata": {},
    "outputs": [],
    "source": [
-    "from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ\n",
-    "ip = \"192.168.3.1\"\n",
-    "port = \"22\"\n",
-    "username = \"xilinx\"\n",
-    "password = \"xilinx\"\n",
-    "target_dir = \"/home/xilinx/finn_tfc_end2end_example\"\n",
-    "model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))\n",
-    "model.save(build_dir + \"/tfc_w1_a1_pynq_deploy.onnx\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Let's verify that the remote access credentials is saved in the model metadata, and that the deployment folder has been successfully copied to the board:"
+    "import numpy as np\n",
+    "from finn.core.onnx_exec import execute_onnx\n",
+    "\n",
+    "input_dict = {iname: x.reshape(ishape)}\n",
+    "ret = execute_onnx(model, input_dict)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 38,
+   "execution_count": 96,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "[key: \"vivado_stitch_proj\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl\"\n",
-       ", key: \"vivado_stitch_vlnv\"\n",
-       "value: \"xilinx_finn:finn:finn_design:1.0\"\n",
-       ", key: \"wrapper_filename\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_stitch_proj_oa43bqzl/finn_vivado_stitch_proj.srcs/sources_1/bd/finn_design/hdl/finn_design_wrapper.v\"\n",
-       ", key: \"vivado_pynq_proj\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_pynq_proj_ljn53hfs\"\n",
-       ", key: \"vivado_synth_rpt\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_pynq_proj_ljn53hfs/synth_report.xml\"\n",
-       ", key: \"vivado_pynq_bitfile\"\n",
-       "value: \"/tmp/finn_dev_jakobap/vivado_pynq_proj_ljn53hfs/resizer.bit\"\n",
-       ", key: \"pynq_driver_dir\"\n",
-       "value: \"/tmp/finn_dev_jakobap/pynq_driver_j_9suyqm\"\n",
-       ", key: \"pynq_ip\"\n",
-       "value: \"51.37.47.42\"\n",
-       ", key: \"pynq_port\"\n",
-       "value: \"23\"\n",
-       ", key: \"pynq_username\"\n",
-       "value: \"xilinx\"\n",
-       ", key: \"pynq_password\"\n",
-       "value: \"x1l1nx_f!nn\"\n",
-       ", key: \"pynq_target_dir\"\n",
-       "value: \"/home/xilinx/finn_tfc_end2end_example\"\n",
-       ", key: \"pynq_deployment_dir\"\n",
-       "value: \"/tmp/finn_dev_jakobap/pynq_deployment_962qxwkv\"\n",
-       ", key: \"pynq_deploy_dir\"\n",
-       "value: \"/tmp/finn_dev_jakobap/pynq_deployment_962qxwkv\"\n",
-       ", key: \"exec_mode\"\n",
-       "value: \"remote_pynq\"\n",
-       "]"
+       "array([[2.]], dtype=float32)"
       ]
      },
-     "execution_count": 38,
+     "execution_count": 96,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "model.model.metadata_props"
+    "ret[oname]"
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 39,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "/home/xilinx/finn_tfc_end2end_example/pynq_deployment_26e8h5jo:\r\n",
-      "total 4276\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    6363 May  7 10:35 driver.py\r\n",
-      "drwxr-xr-x 4 xilinx xilinx    4096 May  7 10:35 finn\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    3264 May  7 10:55 input.npy\r\n",
-      "-rw-r--r-- 1 root   root       172 May  7 10:37 nw_metrics.txt\r\n",
-      "-rw-r--r-- 1 root   root       120 May  7 10:55 output.npy\r\n",
-      "-rw-r--r-- 1 xilinx xilinx 4045675 May  7 10:35 resizer.bit\r\n",
-      "-rw-r--r-- 1 xilinx xilinx  302015 May  7 10:35 resizer.hwh\r\n",
-      "-rw-r--r-- 1 root   root        32 May  7 10:55 sds_trace_data.dat\r\n",
-      "\r\n",
-      "/home/xilinx/finn_tfc_end2end_example/pynq_deployment_962qxwkv:\r\n",
-      "total 4260\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    6363 May  7 17:44 driver.py\r\n",
-      "drwxr-xr-x 4 xilinx xilinx    4096 May  7 17:44 finn\r\n",
-      "-rw-r--r-- 1 xilinx xilinx 4045675 May  7 17:44 resizer.bit\r\n",
-      "-rw-r--r-- 1 xilinx xilinx  302015 May  7 17:44 resizer.hwh\r\n",
-      "\r\n",
-      "/home/xilinx/finn_tfc_end2end_example/pynq_deployment_kvurnk0c:\r\n",
-      "total 4300\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    3861 Apr 27 12:36 driver.py\r\n",
-      "drwxr-xr-x 4 xilinx xilinx    4096 Apr 27 12:37 finn\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    3264 Apr 27 12:37 input.npy\r\n",
-      "-rw-r--r-- 1 root   root        78 Apr 27 12:38 nw_metrics.txt\r\n",
-      "-rw-r--r-- 1 root   root       120 Apr 27 12:37 output.npy\r\n",
-      "-rw-r--r-- 1 xilinx xilinx 4045675 Apr 27 12:36 resizer.bit\r\n",
-      "-rw-r--r-- 1 xilinx xilinx  329531 Apr 27 12:36 resizer.hwh\r\n",
-      "-rw-r--r-- 1 root   root        32 Apr 27 12:38 sds_trace_data.dat\r\n",
-      "\r\n",
-      "/home/xilinx/finn_tfc_end2end_example/pynq_deployment__tnbutz_:\r\n",
-      "total 4276\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    6363 May  6 17:34 driver.py\r\n",
-      "drwxr-xr-x 4 xilinx xilinx    4096 May  6 17:34 finn\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    3264 May  6 17:34 input.npy\r\n",
-      "-rw-r--r-- 1 root   root       173 May  6 17:35 nw_metrics.txt\r\n",
-      "-rw-r--r-- 1 root   root       120 May  6 17:34 output.npy\r\n",
-      "-rw-r--r-- 1 xilinx xilinx 4045675 May  6 17:34 resizer.bit\r\n",
-      "-rw-r--r-- 1 xilinx xilinx  302015 May  6 17:34 resizer.hwh\r\n",
-      "-rw-r--r-- 1 root   root        32 May  6 17:35 sds_trace_data.dat\r\n",
-      "\r\n",
-      "/home/xilinx/finn_tfc_end2end_example/pynq_deployment_w4aa1r9k:\r\n",
-      "total 4276\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    6363 May  7 15:05 driver.py\r\n",
-      "drwxr-xr-x 4 xilinx xilinx    4096 May  7 15:05 finn\r\n",
-      "-rw-r--r-- 1 xilinx xilinx    3264 May  7 15:06 input.npy\r\n",
-      "-rw-r--r-- 1 root   root       172 May  7 15:11 nw_metrics.txt\r\n",
-      "-rw-r--r-- 1 root   root       120 May  7 15:06 output.npy\r\n",
-      "-rw-r--r-- 1 xilinx xilinx 4045675 May  7 15:05 resizer.bit\r\n",
-      "-rw-r--r-- 1 xilinx xilinx  302015 May  7 15:05 resizer.hwh\r\n",
-      "-rw-r--r-- 1 root   root        32 May  7 15:11 sds_trace_data.dat\r\n"
-     ]
-    }
-   ],
    "source": [
-    "! sshpass -p {password} ssh {username}@{ip} -p {port} 'ls -l {target_dir}/*'"
+    "We see that the network correctly predicts this as a digit 2."
    ]
   },
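As a quick sanity check (a minimal sketch, not part of the notebook flow above), the predicted digit returned by the accelerator can be compared against the reference output bundled with the same mnist-conv test data that provided the input image. The `output_0.pb` path below is an assumption that mirrors the `input_0.pb` file loaded earlier.

```python
# Minimal sketch: compare the accelerator's predicted digit with the reference
# output shipped next to the test input. The output_0.pb path is an assumption.
from pkgutil import get_data
import onnx
import onnx.numpy_helper as nph

raw_o = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/output_0.pb")
ref = nph.to_array(onnx.load_tensor_from_string(raw_o))

predicted_digit = int(ret[oname].flatten()[0])  # accelerator result from execute_onnx
reference_digit = int(ref.flatten().argmax())   # argmax of the bundled reference logits
print("accelerator:", predicted_digit, "reference:", reference_digit)
```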
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "We only have two more steps to be able to remotely execute the deployed bitfile with some test data from the MNIST dataset. Let's load up some test data that comes bundled with FINN."
+    "### Validating the Accuracy on a PYNQ Board <a id='validation'></a>\n",
+    "\n",
+    "All the command line prompts here are meant to be executed with `sudo` on the PYNQ board, so we'll use a workaround (`sshpass` and `echo password | sudo -S command`) to get that working from this notebook running on the host computer.\n",
+    "\n",
+    "**Ensure that your PYNQ board has a working internet connecting for the next steps, since some there is some downloading involved.**\n",
+    "\n",
+    "To validate the accuracy, we first need to install the [`dataset-loading`](https://github.com/fbcotter/dataset_loading) Python package to the PYNQ board. This will give us a convenient way of downloading and accessing the MNIST dataset.\n",
+    "\n",
+    "\n",
+    "Command to execute on PYNQ:\n",
+    "\n",
+    "```pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading```"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 41,
+   "execution_count": 75,
    "metadata": {},
    "outputs": [
     {
-     "data": {
-      "text/plain": [
-       "<matplotlib.image.AxesImage at 0x7fe11dda48d0>"
-      ]
-     },
-     "execution_count": 41,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAARX0lEQVR4nO3dfYyVZXrH8d/FoDAw8iYRCaisG/5QqmUbgk1KyOKmxlUMbKJm/aPauAmarMmqTVqz/UOSaqJVa/pH3YStL9CsmiWoq0a7a82mWo1GNFQQW1CULGR4E5H3t+HqH/NgZ3We6549z3nOc9z7+0kmM3Ouec65OTM/zsv13Pdt7i4Af/xGNT0AAJ1B2IFMEHYgE4QdyARhBzIxupM3Zma89Z+ZUaPKH09OnTpV23VXvf6enp6wPjAw0PJ1183dbbjLK4XdzK6U9M+SeiT9q7vfV+X6cmU27O/mS6k/6ip/eKNHx38CqcCk6r29vaW1Q4cOhcem9PX1hfUDBw6U1lIt50mTJoX1zz77LKx3o5afxptZj6R/kfR9SRdLusHMLm7XwAC0V5XX7PMlfeTuW9z9uKSnJS1pz7AAtFuVsM+Q9Lsh328rLvs9ZrbMzNaa2doKtwWgotrfoHP3FZJWSLxBBzSpyiP7dknnDfl+ZnEZgC5UJezvSJptZt8yszMl/VDS8+0ZFoB2a/lpvLufNLPbJP1ag623x9z9g7aNLCPjx48P6wcPHmz5useMGRPWjx07FtZTbcFx48aF9ai9lmoppqSOj9prqT76vn37WhpTN6v0mt3dX5L0UpvGAqBGnC4LZIKwA5kg7EAmCDuQCcIOZIKwA5mwTq4um+vpsqled6qXffTo0bA+duzYlo9Nia676vWfffbZYb3qNNLofp06dWp47O7du8N6amrwyZMnw3qdyuaz88gOZIKwA5kg7EAmCDuQCcIOZIKwA5mg9fYNkGrNVfkd1nnddUtNDa6yem1q6m5qanCTS03TegMyR9iBTBB2IBOEHcgEYQcyQdiBTBB2IBP02TvgrLPOCuvRbqOSNHHixLB+4sSJ0lpqN9LUFNbPP/88rC9YsCCs33rrraW1VC/6jjvuCOtbt24N601OM20SfXYgc4QdyARhBzJB2IFMEHYgE4QdyARhBzJBn/0b4JFHHgnrUS871Wuuuox1b29vWI+ktk2+5JJLwvqmTZvC+vHjx0trZ5xxRnhsdO6ClP53HzlyJKzXqazPXmnLZjP7VNIBSQOSTrr7vCrXB6A+lcJeWOTue9pwPQBqxGt2IBNVw+6SfmNm75rZsuF+wMyWmdlaM1tb8bYAVFD1afwCd99uZudIesXM/sfdXxv6A+6+QtIKiTfogCZVemR39+3F512SnpU0vx2DAtB+LYfdzMab2Vmnv5Z0haQN7RoYgPaq8jR+mqRniz7taElPuvu/t2VUf2RSWzYvWrQorF922WVhPeqVHzx4MDw21W/u6+sL66nzNKI566m11x999NGWr1uS7rzzztLaW2+9FR5b93bSTWg57O6+RdKftnEsAGpE6w3IBGEHMkHYgUwQdiAThB3IBFNcu0Bqqubs2bPD+v79+0trEyZMCI+NpoFK6SmwVbZ8TrX9UlJLcO/du7e0tnTp0vDYdevWhfVUSzLV8qwTS0kDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJdiw42TFRT7fOfnBK6thU/ZZbbgnrq1atCuszZ85s+bZTffZ77rknrK9evTqsn3nmmaW1K664Ijz2wQcfDOuprbCj2168eHF47LZt28L6nj3fvDVWeWQHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiATHZ/Pnup3Rzo51naqOvd54cKFYf2iiy4qrY0bNy48dvTo+FSLNWvWhPUtW7aE9SpSyz3PmTMnrKfu90jq75T57AC6FmEHMkHYgUwQdiAThB3IBGEHMkHYgUx0vM8+alT5/y9V54XXqcpc+lOnTlW67eg+S9VPnjwZHjt+/PiwfujQobCe2o46+p2l5tJfffXVYf3pp58O61X67Kk17VP3a5Na7rOb2WNmtsvMNgy5bIqZvWJmm4vPk9s5WADtN5Kn8U9IuvIrl90l6VV3ny3p1eJ7AF0sGXZ3f03SV/fRWSJpZfH1SknxXjoAGtfqGnTT3L2/+HqHpGllP2hmyyQta/F2ALRJ5QUn3d2jDRvdfYWkFRIbOwJNarX1ttPMpktS8XlX+4YEoA6thv15STcVX98k6VftGQ6AuiT77Gb2lKTvSpoqaaekuyU9J+mXks6XtFXS9e5evhn2/19XbU/jq64bX7UeSfVkU3uoR/uvV9Xb2xvWjxw5EtZT5wBUOcfgwgsvDOsff/xxy9edGldqTfqUw4cPVzq+irI+e/I1u7vfUFL6XqURAegoTpcFMkHYgUwQdiAThB3IBGEHMsGWzYVUC3JgYCCsR3p6esJ61WWHozZRqsWUmsKakrr+aNvkqCZJixYtamlMp0W/0xMnToTHpqa4Vvl7aAqP7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZKKr+ux1budcdTnnKuq+7QMHDpTWUv3iVK87dXyqTx8tF51axvq6664L60ePHg3rY8eOLa2l+uyp31mTWzK3ikd2IBOEHcgEYQcyQdiBTBB2IBOEHcgEYQcy0fE+ezS3u5t75dGSyanllFPq3Fb50ksvDY+dM2dOWE8tJf3cc8+F9UjUB5ekhQsXhvUqW3inlqGOzl2Qqi/B3QQe2YFMEHYgE4QdyARhBzJB2IFMEHYgE4QdyETH++zRnPU6++ipufKped1RT3j06PhuXLp0aVhPHb9kyZKwPmbMmNLa3Llzw2MnTZoU1lO97Ndff73l42fPnh0em1qbPdXrXr9+fWnt8ssvD4+N7lOpO/voKclHdjN7zMx2mdmGIZctN7PtZrau+Liq3mECqGokT+OfkHTlMJc/7O5zi4+X2jssAO2WDLu7vyZpbwfGAqBGVd6gu83M3i+e5k8u+yEzW2Zma81sbYXbAlBRq2H/maRvS5orqV/SQ2U/6O4r3H2eu89r8bYAtEFLYXf3ne4+4O6nJP1c0vz2DgtAu7UUdjObPuTbH0jaUPazALqDpfqoZvaUpO9Kmippp6S7i+/nSnJJn0q6xd37kzdmFt5Yqt+cmvcdmTVrVli/5pprwvrixYtLa6l516l526m509H+61K8hnlfX194bErVed3R7/SLL74Ij504cWJYT9m8eXNpbdWqVeGxDz1U+spUUnf32d192JNKkifVuPsNw1z8aOURAegoTpcFMkHYgUwQdiAThB3IBGEHMpFsvbX1xsw8Wna5zimud999d1hfvnx5WN+zZ09pberUqa0M6UuprYf37o2nJkT1Cy64IDw21RZMbdmccuzYsdJaahpp6u8h1YqNpi2ntlx++eWXw/rNN98c1pvc0rms9cYjO5AJwg5kgrADmSDsQCYIO5AJwg5kgrADmeh4nz2qV9maODXVMtX3rLLt8q5du8
L61q1bw/oDDzwQ1levXh3W580rXwTo4YcfDo9Nbdk8eXLpimOSpG3btoX16Hf6xBNPhMd+8sknYf3aa68N69HU46rTa1988cWwnpoyXSf67EDmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZKKjffZRo0Z5ND/6+PHj4fHnnHNOaW337t3hsak+e2rudNQvTm0HvWnTprA+ZcqUsJ5atjha7vn8888Pj03NZ08t771v376wfuONN5bWXnjhhfDYlNQ6AtFy0YsWLQqPTa0xkLpfUst/14k+O5A5wg5kgrADmSDsQCYIO5AJwg5kgrADmeiq+exVpPqeK1euDOvXX399y9d/+PDh8Nhx48aF9dS2yKl5/gMDA6W11Lrvb775Zlh/8sknw/q6devC+htvvFFaS51fkOrhp37n0Xkb8+fPD499++23w/rjjz8e1lPrytep5T67mZ1nZr81s41m9oGZ/aS4fIqZvWJmm4vP8SoHABo1kqfxJyX9jbtfLOnPJf3YzC6WdJekV919tqRXi+8BdKlk2N29393fK74+IOlDSTMkLZF0+rnxSklL6xokgOriFz1fYWazJH1H0tuSprl7f1HaIWlayTHLJC1rfYgA2mHE78abWZ+kNZJud/f9Q2s++C7fsG++ufsKd5/n7uWrIgKo3YjCbmZnaDDov3D3Z4qLd5rZ9KI+XVK8xCqARiVbbzY4f3OlpL3ufvuQyx+Q9Jm732dmd0ma4u5/m7iu8MbOPffccCw7duwI65Fo+15JmjlzZli/9957S2szZswIj01tuZzaujjaLlqS7r///tLaxo0bw2NTU1xT2yKnpKYtR1JtwxMnToT1aOpx6u9+woQJYb3qlOk6lbXeRvKa/S8k/ZWk9WZ2uqn6U0n3Sfqlmf1I0lZJcaMaQKOSYXf3/5JU9l/k99o7HAB14XRZIBOEHcgEYQcyQdiBTBB2IBMdneLa09PjUV83NVU06n3u37+/tCZJfX19YT3VN416vlX6vVK655s6RyDqZad6+MeOHQvrVUW/79Ryzampwam/lyq/s5SqY6sTS0kDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJrlpKOjWHOOqlp5YVrjove/r06aW1/v7+0tpI9Pb2hvXUls11XndqGetDhw6F9SpzylNGjYofq6rMKW/6/IQq6LMDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJruqzA6iOPjuQOcIOZIKwA5kg7EAmCDuQCcIOZIKwA5lIht3MzjOz35rZRjP7wMx+Uly+3My2m9m64uOq+ocLoFXJk2rMbLqk6e7+npmdJeldSUs1uB/7QXd/cMQ3xkk1QO3KTqoZyf7s/ZL6i68PmNmHkma0d3gA6vYHvWY3s1mSviPp7eKi28zsfTN7zMwmlxyzzMzWmtnaSiMFUMmIz403sz5J/ynpXnd/xsymSdojySX9gwaf6t+cuA6exgM1K3saP6Kwm9kZkl6U9Gt3/6dh6rMkvejuf5K4HsIO1KzliTA2uDzoo5I+HBr04o27034gaUPVQQKoz0jejV8g6XVJ6yWdXpv3p5JukDRXg0/jP5V0S/FmXnRdPLIDNav0NL5dCDtQP+azA5kj7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZIKwA5kg7EAmkgtOttkeSVuHfD+1uKwbdevYunVcEmNrVTvHdkFZoaPz2b9242Zr3X1eYwMIdOvYunVcEmNrVafGxtN4IBOEHchE02Ff0fDtR7p1bN06LomxtaojY2v0NTuAzmn6kR1AhxB2IBONhN3MrjSz/zWzj8zsribGUMbMPjWz9cU21I3uT1fsobfLzDYMuWyKmb1iZpuLz8PusdfQ2LpiG+9gm/FG77umtz/v+Gt2M+uRtEnSX0raJukdSTe4+8aODqSEmX0qaZ67N34ChpktlHRQ0qrTW2uZ2T9K2uvu9xX/UU5297/rkrEt1x+4jXdNYyvbZvyv1eB9187tz1vRxCP7fEkfufsWdz8u6WlJSxoYR9dz99ck7f3KxUskrSy+XqnBP5aOKxlbV3D3fnd/r/j6gKTT24w3et8F4+qIJsI+Q9Lvhny/Td2137tL+o2ZvWtmy5oezDCmDdlma4ekaU0OZhjJbbw76SvbjHfNfdfK9udV8Qbd1y1w9z+T9H1JPy6ernYlH3wN1k29059J+rYG9wDsl/RQk4MpthlfI+l2d98/tNbkfTfMuDpyvzUR9u2Szhvy/czisq7g7tuLz7skPavBlx3dZOfpHXSLz7saHs+X3H2nuw+4+ylJP1eD912xzfgaSb9w92eKixu/74YbV6futybC/o6k2Wb2LTM7U9IPJT3fwDi+xszGF2+cyMzGS7pC3bcV9fOSbiq+vknSrxocy+/plm28y7YZV8P3XePbn7t7xz8kXaXBd+Q/lvT3TYyhZFwXSvrv4uODpscm6SkNPq07ocH3Nn4k6WxJr0raLOk/JE3porH9mwa39n5fg8Ga3tDYFmjwKfr7ktYVH1c1fd8F4+rI/cbpskAmeIMOyARhBzJB2IFMEHYgE4QdyARhBzJB2IFM/B+tIjCppYWKvAAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<Figure size 432x288 with 1 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[sudo] password for xilinx: Collecting git+https://github.com/fbcotter/dataset_loading.git@0.0.4\n",
+      "  Cloning https://github.com/fbcotter/dataset_loading.git (to 0.0.4) to /tmp/pip-hhwx4j3n-build\n",
+      "  Requirement already satisfied (use --upgrade to upgrade): dataset-loading==0.0.4 from git+https://github.com/fbcotter/dataset_loading.git@0.0.4 in /usr/local/lib/python3.6/dist-packages\n",
+      "Requirement already satisfied: Pillow in /usr/lib/python3/dist-packages (from dataset-loading==0.0.4)\n",
+      "Requirement already satisfied: scipy in /usr/lib/python3/dist-packages (from dataset-loading==0.0.4)\n",
+      "Connection to 192.168.2.99 closed.\n"
+     ]
     }
    ],
    "source": [
-    "from pkgutil import get_data\n",
-    "import onnx.numpy_helper as nph\n",
-    "import matplotlib.pyplot as plt\n",
-    "\n",
-    "raw_i = get_data(\"finn\", \"data/onnx/mnist-conv/test_data_set_0/input_0.pb\")\n",
-    "x = nph.to_array(onnx.load_tensor_from_string(raw_i))\n",
-    "plt.imshow(x.reshape(28,28), cmap='gray')"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Recall that we partitioned our original network into a parent graph that contained the non-synthesizable nodes and a child graph that contained the bulk of the network, which we turned into a bitfile. We'll load up the parent graph, modify the `StreamingDataflowPartition` node so that it points to the deployed ONNX graph."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 42,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "parent_model = ModelWrapper(build_dir+\"/tfc_w1_a1_dataflow_parent.onnx\")\n",
-    "sdp_node = parent_model.graph.node[2]\n",
-    "remote_exec_model = build_dir + \"/tfc_w1_a1_pynq_deploy.onnx\"\n",
-    "getCustomOp(sdp_node).set_nodeattr(\"model\", remote_exec_model)\n",
-    "parent_model.save(build_dir+\"/tfc_w1_a1_dataflow_parent_with_remote_bitfile_exec.onnx\")"
+    "! sshpass -p {password} ssh -t {username}@{ip} -p {port} 'echo {password} | sudo -S pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading'"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Finally, we can call `execute_onnx` on the parent graph, which will internally call remote execution with the bitfile once the `StreamingDataflowPartition` node is reached, grab the results, then continue executing the last portion of the network. "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 43,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import numpy as np\n",
-    "from finn.core.onnx_exec import execute_onnx\n",
-    "iname = parent_model.graph.input[0].name\n",
-    "oname = parent_model.graph.output[0].name\n",
-    "ishape = parent_model.get_tensor_shape(iname)\n",
-    "input_dict = {iname: x.reshape(ishape)}\n",
-    "ret = execute_onnx(parent_model, input_dict, True)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We'll pass the output of the network through a softmax function to interpret it as probabilities, and plot the per-class probabilities as a bar chart."
+    "We can now use the `validate.py` script that was generated together with the driver to measure top-1 accuracy on the MNIST dataset.\n",
+    "\n",
+    "Command to execute on PYNQ:\n",
+    "\n",
+    "`python3.6 validate.py --dataset mnist --batchsize 1000`"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 44,
+   "execution_count": 108,
    "metadata": {},
    "outputs": [
     {
-     "data": {
-      "text/plain": [
-       "<BarContainer object of 10 artists>"
-      ]
-     },
-     "execution_count": 44,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAMp0lEQVR4nO3cf6zdd13H8eeL1qoMgia7f2jbcRttMA2iI9cyJUHDZtJlpjVhJl0CYQbSmFCZQqKdmv1R/4Fhpv7RGJoxQxQsOPnj4qrVCP7hHyy9+xGgq43XOtdWDHeAYDRaGt7+0VNyvLvt/XY79572fZ+PZMn5fr+f3O/7bN0z336/95xUFZKkm9+rpj2AJGkyDLokNWHQJakJgy5JTRh0SWpi87ROfOutt9bs7Oy0Ti9JN6WnnnrqxaqaWenY1II+OzvLwsLCtE4vSTelJP96tWPecpGkJgy6JDVh0CWpCYMuSU0MCnqSPUnOJFlMcmiF4/cnWUry7Oif905+VEnStaz6Wy5JNgFHgJ8HzgMnk8xX1XPLln6qqg6uwYySpAGGXKHvBhar6mxVXQSOAfvWdixJ0vUaEvStwLmx7fOjfcu9I8kXkzyeZPtKPyjJgSQLSRaWlpZexriSpKuZ1EPRzwKzVfUm4G+Bj6+0qKqOVtVcVc3NzKz4QSdJ0ss05JOiF4DxK+5to33fVVVfG9t8FHj4lY+m5WYPPbHm53j+Q/es+TkkrY0hV+gngZ1JdiTZAuwH5scXJPmhsc29wOnJjShJGmLVK/SqupTkIHAC2AQ8VlWnkhwGFqpqHnh/kr3AJeDrwP1rOLMkaQWDvpyrqo4Dx5fte2js9YPAg5MdTZJ0PfykqCQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgYFPcmeJGeSLCY5dI1170hSSeYmN6IkaYhVg55kE3AEuBvYBdyXZNcK614LPAA8OekhJUmrG3KFvhtYrKqzVXUROAbsW2Hd7wIfBv5ngvNJkgYaEvStwLmx7fOjfd+V5M3A9qp6YoKzSZKuwyt+KJrkVcAjwAcHrD2QZCHJwtLS0is9tSRpzJCgXwC2j21vG+274rXAG4G/T/I8cAcwv9KD0ao6WlVzVTU3MzPz8qeWJL3EkKCfBHYm2ZFkC7AfmL9ysKq+WVW3VtVsVc0CXwD2VtXCmkwsSVrRqkGvqkvAQeAEcBr4dFWdSnI4yd61HlCSNMzmIYuq6jhwfNm+h66y9ude+ViSpOvlJ0UlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpiUFBT7InyZkki0kOrXD8V5J8KcmzSf4hya7JjypJupZVg55kE3AEuBvYBdy3QrA/WVU/XlU/CTwMPDLxSSVJ1zTkCn03sFhVZ6vqInAM2De+oKq+NbZ5C1CTG1GSNMTmAWu2AufGts8Db1m+KMn7gA8AW4C3r/SDkhwADgDcdttt1zurJOkaJvZQtKqOVNWPAL8J/M5V1hytqrmqmpuZmZnUqSVJDAv6BWD72Pa20b6rOQb84isZSpJ0/YYE/SSwM8mOJFuA/cD8+IIkO8c27wH+aXIjSpKGWPUeelVdSnIQOAFsAh6rqlNJDgMLVTUPHExyF/Bt4BvAu9dyaEnSSw15KEpVHQeOL9v30NjrByY8lyTpOvlJUUlqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWpiUNCT7ElyJslikkMrHP9AkueSfDHJ3yV5/eRHlSRdy6pBT7IJOALcDewC7kuya9myZ4C5qnoT8Djw8KQHlSRd25Ar9N3AYlWdraqLwDFg3/iCqvp8Vf33aPMLwLbJjilJWs2QoG8Fzo1tnx/tu5r3AH+10oEkB5IsJFlYWloaPqUkaVUTfSia5J3AHPCRlY5X1dGqmququZmZmUmeWpI2vM0D1lwAto9tbxvt+3+S3AX8NvCzVfW/kxlPkjTUkCv0k8DOJDuSbAH2A/PjC5LcDnwU2FtVX538mJKk1awa9Kq6BBwETgCngU9X1akkh5PsHS37CPAa4M+TPJtk/io/TpK0RobccqGqjgPHl+17aOz1XROeS5J0nfykqCQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDUxKOhJ9iQ5k2QxyaEVjr8tydNJLiW5d/JjSpJWs2rQk2wCjgB3A7uA+5LsWrbsBeB+4JOTHlCSNMzmAWt2A4tVdRYgyTFgH/DclQVV9fzo2HfWYEZJ0gBDbrlsBc6NbZ8f7btuSQ4kWUiysLS09HJ+hCTpKtb1oWhVHa2quaqam5mZWc9TS1J7Q4J+Adg+tr1ttE+SdAMZEvSTwM4kO5JsAfYD82s7liTpeq0a9Kq6BBwETgCngU9X1akkh5PsBUjyU0nOA78EfDTJqbUcWpL0UkN+y4WqOg4cX7bvobHXJ7l8K0aSNCV+UlSSmjDoktSEQZekJgy6JDUx6KGoJK2n2UNPrPk5nv/QPWt+jvVm0DWI/4NJNz5vuUhSEzflFbpXi5L0Ul6hS1ITBl2SmjDoktTETXkPXdLa81nVzceg66aw1nExLOrAWy6S1IRBl6QmvOUi3cC81aTrYdClVRhV3Sy85SJJTRh0SWrCoEtSE95Dv05+2ELSjcqgS9KYm/mizVsuktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNTEo6En2JDmTZDHJoRWOf2+ST42OP5lkdtKDSpKubdWgJ9kEHAHuBnYB9yXZtWzZe4BvVNWPAr8PfHjSg0qSrm3IFfpuYLGqzlbVReAYsG/Zmn3Ax0evHwfuTJLJjSlJWk2q6toLknuBPVX13tH2u4C3VNXBsTVfHq05P9r+59GaF5f9rAPAgdHmG4Azk3ojA9wKvLjqqn583xuL77u/11
fVzEoH1vX70KvqKHB0Pc95RZKFqpqbxrmnyfe9sfi+N7Yht1wuANvHtreN9q24Jslm4HXA1yYxoCRpmCFBPwnsTLIjyRZgPzC/bM088O7R63uBz9Vq93IkSRO16i2XqrqU5CBwAtgEPFZVp5IcBhaqah74GPAnSRaBr3M5+jeaqdzquQH4vjcW3/cGtupDUUnSzcFPikpSEwZdkppoH/TVvragoyTbk3w+yXNJTiV5YNozrackm5I8k+Qvpz3LekryA0keT/KPSU4n+elpz7Qekvz66M/5l5P8WZLvm/ZM09I66AO/tqCjS8AHq2oXcAfwvg3yvq94ADg97SGm4A+Bv66qHwN+gg3w7yDJVuD9wFxVvZHLv7hxI/5SxrpoHXSGfW1BO1X1lap6evT6P7n8P/bW6U61PpJsA+4BHp32LOspyeuAt3H5N86oqotV9R/TnWrdbAa+f/QZmFcD/zbleaame9C3AufGts+zQcJ2xeibL28HnpzuJOvmD4DfAL4z7UHW2Q5gCfjj0e2mR5PcMu2h1lpVXQB+D3gB+Arwzar6m+lONT3dg76hJXkN8BfAr1XVt6Y9z1pL8gvAV6vqqWnPMgWbgTcDf1RVtwP/BbR/ZpTkB7n8t+4dwA8DtyR553Snmp7uQR/ytQUtJfkeLsf8E1X1mWnPs07eCuxN8jyXb6+9PcmfTnekdXMeOF9VV/4m9jiXA9/dXcC/VNVSVX0b+AzwM1OeaWq6B33I1xa0M/rq4o8Bp6vqkWnPs16q6sGq2lZVs1z+b/25qtoQV2tV9e/AuSRvGO26E3huiiOtlxeAO5K8evTn/k42wMPgq1nXb1tcb1f72oIpj7Ue3gq8C/hSkmdH+36rqo5PcSatvV8FPjG6eDkL/PKU51lzVfVkkseBp7n8213PsIG/BsCP/ktSE91vuUjShmHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUxP8B9uoCk0KMtNwAAAAASUVORK5CYII=\n",
-      "text/plain": [
-       "<Figure size 432x288 with 1 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[sudo] password for xilinx: Looking for Train Imgs\n",
+      "Tar File found in data_dir. Not Downloading again\n",
+      "Looking for Train Labels\n",
+      "Tar File found in data_dir. Not Downloading again\n",
+      "Looking for Test Imgs\n",
+      "Tar File found in data_dir. Not Downloading again\n",
+      "Looking for Test Labels\n",
+      "Tar File found in data_dir. Not Downloading again\n",
+      "batch 0 / 10 : total OK 913 NOK 87\n",
+      "batch 1 / 10 : total OK 1800 NOK 200\n",
+      "batch 2 / 10 : total OK 2714 NOK 286\n",
+      "batch 3 / 10 : total OK 3619 NOK 381\n",
+      "batch 4 / 10 : total OK 4535 NOK 465\n",
+      "batch 5 / 10 : total OK 5488 NOK 512\n",
+      "batch 6 / 10 : total OK 6438 NOK 562\n",
+      "batch 7 / 10 : total OK 7399 NOK 601\n",
+      "batch 8 / 10 : total OK 8371 NOK 629\n",
+      "batch 9 / 10 : total OK 9296 NOK 704\n",
+      "Final accuracy: 92.960000\n",
+      "Connection to 192.168.2.99 closed.\n"
+     ]
     }
    ],
    "source": [
-    "def softmax(x):\n",
-    "    \"\"\"Compute softmax values for each sets of scores in x.\"\"\"\n",
-    "    e_x = np.exp(x - np.max(x))\n",
-    "    return e_x / e_x.sum()\n",
-    "\n",
-    "logits = ret[oname].flatten()\n",
-    "prob = softmax(logits)\n",
-    "\n",
-    "plt.bar(np.arange(10), prob)"
+    "! sshpass -p {password} ssh -t {username}@{ip} -p {port} 'cd {target_dir_pynq}; echo {password} | sudo -S python3.6 validate.py --dataset mnist --batchsize 1000'"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "We see that the network correctly predicts this as a digit 2 with high probability. This concludes our tutorial on how to take a simple fully-connected BNN all the way down to hardware with FINN, and execute it remotely on a PYNQ board."
+    "We see that the final top-1 accuracy is 92.96%, which is very close to the 93.17% reported on the [BNN-PYNQ accuracy table in Brevitas](https://github.com/Xilinx/brevitas/tree/master/brevitas_examples/bnn_pynq). "
    ]
   },
   {
@@ -1918,7 +1677,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 45,
+   "execution_count": 104,
    "metadata": {},
    "outputs": [
     {
@@ -1926,18 +1685,20 @@
      "output_type": "stream",
      "text": [
       "Network metrics:\n",
-      "runtime[ms]: 1.4772415161132812\n",
-      "throughput[images/s]: 676937.378954164\n",
-      "DRAM_in_bandwidth[Mb/s]: 75.81698644286635\n",
-      "DRAM_out_bandwidth[Mb/s]: 27.07749515816656\n"
+      "runtime[ms]: 10.43391227722168\n",
+      "throughput[images/s]: 958413.2714850444\n",
+      "DRAM_in_bandwidth[Mb/s]: 751.3960048442748\n",
+      "DRAM_out_bandwidth[Mb/s]: 0.9584132714850445\n",
+      "fclk[mhz]: 100.0\n",
+      "N: 10000\n"
      ]
     }
    ],
    "source": [
-    "from finn.core.throughput_test import throughput_test\n",
+    "from finn.core.throughput_test import throughput_test_remote\n",
     "\n",
-    "child_model = ModelWrapper(getCustomOp(sdp_node).get_nodeattr(\"model\"))\n",
-    "res = throughput_test(child_model)\n",
+    "model = ModelWrapper(build_dir + \"/tfc_w1_a1_pynq_deploy.onnx\")\n",
+    "res = throughput_test_remote(model, 10000)\n",
     "print(\"Network metrics:\")\n",
     "for key in res:\n",
     "    print(str(key) + \": \" + str(res[key]))"
@@ -1952,14 +1713,14 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 46,
+   "execution_count": 105,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "We reach approximately 43% of the ideal performance.\n"
+      "We reach approximately 61% of the ideal performance.\n"
      ]
     }
    ],
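The "approximately 61% of the ideal performance" figure printed above can be reproduced from the reported metrics. The sketch below is a minimal illustration rather than the notebook's own computation; in particular, the 64-cycle initiation interval per sample is an assumption based on the folding factors chosen earlier in the example.

```python
# Minimal sketch: derive the percent-of-ideal figure from the throughput test
# metrics. The 64 cycles-per-sample initiation interval is an assumption.
fclk_hz = res["fclk[mhz]"] * 1e6                    # 100 MHz, as reported above
cycles_per_sample = 64                              # assumed initiation interval after folding
ideal_throughput = fclk_hz / cycles_per_sample      # ~1.56M images/s
measured_throughput = res["throughput[images/s]"]   # ~958k images/s
print("We reach approximately %d%% of the ideal performance."
      % (100 * measured_throughput / ideal_throughput))
```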
@@ -1979,8 +1740,15 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "The measured values were recorded with a batch size of 1000 and at a frequency of 100 MHz. We will be improving the efficiency of the generated accelerator examples in the coming FINN releases."
+    "The measured values were recorded with a batch size of 10000 and at a frequency of 100 MHz. We will be improving the efficiency of the generated accelerator examples in the coming FINN releases."
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
diff --git a/notebooks/end2end_example/tfc_end2end_verification.ipynb b/notebooks/end2end_example/tfc_end2end_verification.ipynb
index 1ce17d3da3e4313f2b89eb6a790efcee704f70e9..92de7fb7e42b5d0013af31cc0fd88e34d354def8 100644
--- a/notebooks/end2end_example/tfc_end2end_verification.ipynb
+++ b/notebooks/end2end_example/tfc_end2end_verification.ipynb
@@ -264,7 +264,7 @@
        "        "
       ],
       "text/plain": [
-       "<IPython.lib.display.IFrame at 0x7f776245ac18>"
+       "<IPython.lib.display.IFrame at 0x7f3cac09d978>"
       ]
      },
      "execution_count": 8,
@@ -409,7 +409,16 @@
    "outputs": [],
    "source": [
     "from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim\n",
-    "child_model = ModelWrapper(build_dir + \"/tfc_w1_a1_ipgen.onnx\")\n",
+    "from finn.transformation.fpgadataflow.prepare_ip import PrepareIP\n",
+    "from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP\n",
+    "\n",
+    "test_fpga_part = \"xc7z020clg400-1\"\n",
+    "target_clk_ns = 10\n",
+    "\n",
+    "child_model = ModelWrapper(build_dir + \"/tfc_w1_a1_set_folding_factors.onnx\")\n",
+    "child_model = child_model.transform(GiveUniqueNodeNames())\n",
+    "child_model = child_model.transform(PrepareIP(test_fpga_part, target_clk_ns))\n",
+    "child_model = child_model.transform(HLSSynthIP())\n",
     "child_model = child_model.transform(SetExecMode(\"rtlsim\"))\n",
     "child_model = child_model.transform(PrepareRTLSim())\n",
     "child_model.save(build_dir + \"/tfc_w1_a1_dataflow_child.onnx\")"
@@ -478,18 +487,44 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 18,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/workspace/finn/src/finn/transformation/fpgadataflow/hlssynth_ip.py:70: UserWarning: Using pre-existing IP for StreamingFCLayer_Batch_3\n",
+      "  warnings.warn(\"Using pre-existing IP for %s\" % node.name)\n",
+      "/workspace/finn/src/finn/transformation/fpgadataflow/hlssynth_ip.py:70: UserWarning: Using pre-existing IP for StreamingFCLayer_Batch_1\n",
+      "  warnings.warn(\"Using pre-existing IP for %s\" % node.name)\n",
+      "/workspace/finn/src/finn/transformation/fpgadataflow/hlssynth_ip.py:70: UserWarning: Using pre-existing IP for StreamingFCLayer_Batch_2\n",
+      "  warnings.warn(\"Using pre-existing IP for %s\" % node.name)\n",
+      "/workspace/finn/src/finn/transformation/fpgadataflow/hlssynth_ip.py:70: UserWarning: Using pre-existing IP for StreamingFCLayer_Batch_0\n",
+      "  warnings.warn(\"Using pre-existing IP for %s\" % node.name)\n"
+     ]
+    }
+   ],
    "source": [
-    "child_model = ModelWrapper(build_dir + \"/tfc_w1_a1_ipstitch.onnx\")\n",
+    "from finn.transformation.fpgadataflow.insert_dwc import InsertDWC\n",
+    "from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO\n",
+    "from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP\n",
+    "\n",
+    "child_model = ModelWrapper(build_dir + \"/tfc_w1_a1_dataflow_child.onnx\")\n",
+    "child_model = child_model.transform(InsertDWC())\n",
+    "child_model = child_model.transform(InsertFIFO())\n",
+    "child_model = child_model.transform(GiveUniqueNodeNames())\n",
+    "child_model = child_model.transform(PrepareIP(test_fpga_part, target_clk_ns))\n",
+    "child_model = child_model.transform(HLSSynthIP())\n",
+    "child_model = child_model.transform(CreateStitchedIP(test_fpga_part, target_clk_ns))\n",
+    "child_model = child_model.transform(PrepareRTLSim())\n",
     "child_model.set_metadata_prop(\"exec_mode\",\"rtlsim\")\n",
     "child_model.save(build_dir + \"/tfc_w1_a1_dataflow_child.onnx\")"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 16,
+   "execution_count": 19,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -502,7 +537,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 17,
+   "execution_count": 20,
    "metadata": {},
    "outputs": [
     {
@@ -522,6 +557,13 @@
     "else:\n",
     "    print(\"The results are not the same!\")"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
diff --git a/notebooks/end2end_example/top.pdf b/notebooks/end2end_example/top.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..b9e6d4c75c23ee3093b45173df4e5a402a6c44e9
--- /dev/null
+++ b/notebooks/end2end_example/top.pdf
@@ -0,0 +1,2755 @@
+%PDF-1.4
+1 0 obj
+<<
+  /Title    (top_imp)
+  /Author   (maltanar)
+  /Producer (Concept Engineering GmbH)
+  /Creator  (Nlview 7.0.19  2019-03-26 bk=1.5019 VDI=41 GEI=35)
+  /CreationDate (D:20200921141652)
+>>
+endobj
+2 0 obj
+<<
+  /Type     /Catalog
+  /Pages    3 0 R
+  /Outlines 7 0 R
+  /PageMode /UseThumbs
+  /ViewerPreferences << /DisplayDocTitle true >>
+>>
+endobj
+4 0 obj
+<<
+  /Type     /Font
+  /Subtype  /Type1
+  /Name     /F1
+  /BaseFont /Helvetica
+  /Encoding /MacRomanEncoding
+>>
+endobj
+5 0 obj
+<<
+  /ExtGState  6 0 R
+  /Font       << /F1 4 0 R >>
+  /ColorSpace << /PCS [/Pattern /DeviceRGB] >>
+  /Pattern    8 0 R
+  /XObject    9 0 R
+>>
+endobj
+%
+% Nlview page 1
+% (user space scaling 0.298052)
+%
+10 0 obj
+<<
+  /Type      /Page
+  /Parent    3 0 R
+  /Resources 5 0 R
+  /Contents  11 0 R
+  /MediaBox  [0 0 792 612]
+  /Rotate    270
+>>
+endobj
+11 0 obj
+<<
+  /Length 22608
+>>
+stream
+1 0 0 1 0 201.197 cm
+1 0 0 1 28.8 28.8 cm
+0.298052 0 0 -0.298052 0 0 cm
+1 0 0 1 0 -510 cm
+0 0 2464 510 re
+W n
+/GS gs
+1 0 0 1 10 0 cm
+q
+1.000 1.000 1.000 rg
+/GSa0 gs
+-10 0 2464 511 re
+f
+Q
+q
+0.867 0.831 0.816 rg
+/GSa0 gs
+2340 340 m
+2340 333 l
+2354 333 l
+2361 340 l
+2354 347 l
+2340 347 l
+h f
+Q
+[] 0 d
+3 w
+0.165 0.369 0.435 RG
+/GSA0 gs
+2340 340 m
+2340 333 l
+2354 333 l
+2361 340 l
+2354 347 l
+2340 347 l
+h S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2365 340 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+0 -4.308 Td
+(DDR) Tj
+ET
+Q
+q
+0.867 0.831 0.816 rg
+/GSa0 gs
+2340 360 m
+2340 353 l
+2354 353 l
+2361 360 l
+2354 367 l
+2340 367 l
+h f
+Q
+2340 360 m
+2340 353 l
+2354 353 l
+2361 360 l
+2354 367 l
+2340 367 l
+h S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 2365 360 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+0 -4.308 Td
+(FIXED_IO) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1067 202 m
+1233 202 l
+1233 202 l
+1236 203 l
+1238 204 l
+1239 206 l
+1240 209 l
+1240 209 l
+1240 291 l
+1240 291 l
+1239 294 l
+1238 296 l
+1236 297 l
+1233 298 l
+1233 298 l
+1067 298 l
+1067 298 l
+1064 297 l
+1062 296 l
+1061 294 l
+1060 291 l
+1060 291 l
+1060 209 l
+1060 209 l
+1061 206 l
+1062 204 l
+1064 203 l
+h f
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1150 200 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-79.362 2.484 Td
+(StreamingDataflowPartition_1) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 1150 300 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-95.706 -8.616 Td
+(StreamingDataflowPartition_1_v1_0) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1050 221 10 18 re
+f
+Q
+q
+1 0 0 1 1069.5 230.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im0 Do
+Q
+q
+1 0 0 1 1055 230 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im1 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1077 230 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(s_axis_0) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+1240 241 10 18 re
+f
+Q
+q
+1 0 0 1 1230.5 250.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im0 Do
+Q
+q
+1 0 0 1 1245 250 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1223 250 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-42.79 -3.59 Td
+(m_axis_0) Tj
+ET
+Q
+0.000 0.000 0.000 RG
+1050 250 m
+1060 250 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1062 250 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+1060 270 m
+1060 268.343 1058.66 267 1057 267 c
+1055.34 267 1054 268.343 1054 270 c
+1054 271.657 1055.34 273 1057 273 c
+1058.66 273 1060 271.657 1060 270 c
+S
+1050 270 m
+1054 270 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 1062 270 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+1067 202 m
+1233 202 l
+S
+1240 209 m
+1240 208.889 1240 208.778 1240 208.667 c
+1239.91 204.893 1236.77 201.908 1233 202 c
+S
+1240 209 m
+1240 291 l
+S
+1233 298 m
+1233.11 298.003 1233.22 298.003 1233.33 298 c
+1237.11 297.908 1240.09 294.774 1240 291 c
+S
+1233 298 m
+1067 298 l
+S
+1060 291 m
+1060 291.111 1060 291.222 1060 291.333 c
+1060.09 295.107 1063.23 298.092 1067 298 c
+S
+1060 291 m
+1060 209 l
+S
+1067 202 m
+1066.89 201.997 1066.78 201.997 1066.67 202 c
+1062.89 202.092 1059.91 205.226 1060 209 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+427 52 m
+653 52 l
+653 52 l
+656 53 l
+658 54 l
+659 56 l
+660 59 l
+660 59 l
+660 261 l
+660 261 l
+659 264 l
+658 266 l
+656 267 l
+653 268 l
+653 268 l
+427 268 l
+427 268 l
+424 267 l
+422 266 l
+421 264 l
+420 261 l
+420 261 l
+420 59 l
+420 59 l
+421 56 l
+422 54 l
+424 53 l
+h f
+Q
+q
+1 0 0 1 550 160 cm
+1 0 0 1 -24 -30 cm
+48 0 0 -60 0 60 cm /Im3 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 540 50 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-50.358 2.484 Td
+(axi_interconnect_0) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 540 270 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-44.358 -8.616 Td
+(AXI Interconnect) Tj
+ET
+Q
+q
+1 0 0 1 431 63 cm
+1 0 0 1 -8 -8 cm
+16 0 0 -16 0 16 cm /Im4 Do
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+410 71 10 18 re
+f
+Q
+q
+1 0 0 1 429.5 80.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im0 Do
+Q
+q
+1 0 0 1 415 80 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im5 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 437 80 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(S00_AXI) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+660 141 10 18 re
+f
+Q
+q
+1 0 0 1 650.5 150.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im0 Do
+Q
+q
+1 0 0 1 665 150 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im6 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 643 150 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-41.13 -3.59 Td
+(M00_AXI) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+660 161 10 18 re
+f
+Q
+q
+1 0 0 1 650.5 170.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im0 Do
+Q
+q
+1 0 0 1 665 170 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im6 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 643 170 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-41.13 -3.59 Td
+(M01_AXI) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+410 100 m
+420 100 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 422 100 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ACLK) Tj
+ET
+Q
+410 120 m
+420 120 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 422 120 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ARESETN) Tj
+ET
+Q
+410 140 m
+420 140 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 422 140 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(S00_ACLK) Tj
+ET
+Q
+410 160 m
+420 160 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 422 160 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(S00_ARESETN) Tj
+ET
+Q
+410 180 m
+420 180 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 422 180 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(M00_ACLK) Tj
+ET
+Q
+410 200 m
+420 200 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 422 200 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(M00_ARESETN) Tj
+ET
+Q
+410 220 m
+420 220 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 422 220 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(M01_ACLK) Tj
+ET
+Q
+410 240 m
+420 240 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 422 240 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(M01_ARESETN) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+427 52 m
+653 52 l
+S
+660 59 m
+660.003 58.8889 660.003 58.7778 660 58.6667 c
+659.908 54.8927 656.774 51.908 653 52 c
+S
+660 59 m
+660 261 l
+S
+653 268 m
+653.111 268.003 653.222 268.003 653.333 268 c
+657.107 267.908 660.092 264.774 660 261 c
+S
+653 268 m
+427 268 l
+S
+420 261 m
+419.997 261.111 419.997 261.222 420 261.333 c
+420.092 265.107 423.226 268.092 427 268 c
+S
+420 261 m
+420 59 l
+S
+427 52 m
+426.889 51.9973 426.778 51.9973 426.667 52 c
+422.893 52.092 419.908 55.2261 420 59 c
+S
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+737 202 m
+963 202 l
+963 202 l
+966 203 l
+968 204 l
+969 206 l
+970 209 l
+970 209 l
+970 291 l
+970 291 l
+969 294 l
+968 296 l
+966 297 l
+963 298 l
+963 298 l
+737 298 l
+737 298 l
+734 297 l
+732 296 l
+731 294 l
+730 291 l
+730 291 l
+730 209 l
+730 209 l
+731 206 l
+732 204 l
+734 203 l
+h f
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 850 200 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-16.338 2.484 Td
+(idma0) Tj
+ET
+Q
+q
+0.255 0.380 0.624 rg
+/GSa0 gs
+1 0 0 1 850 300 cm
+BT
+/F1 12 Tf
+1 0 0 -1 0 0 Tm
+-95.706 -8.616 Td
+(StreamingDataflowPartition_0_v1_0) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+720 221 10 18 re
+f
+Q
+q
+1 0 0 1 739.5 230.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im0 Do
+Q
+q
+1 0 0 1 725 230 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im7 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 747 230 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(s_axi_control) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+970 231 10 18 re
+f
+Q
+q
+1 0 0 1 960.5 240.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im0 Do
+Q
+q
+1 0 0 1 975 240 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im6 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 953 240 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-65.57 -3.59 Td
+(m_axi_gmem0) Tj
+ET
+Q
+q
+0.929 0.965 0.996 rg
+/GSa0 gs
+970 251 10 18 re
+f
+Q
+q
+1 0 0 1 960.5 260.5 cm
+1 0 0 1 -7.5 -7.5 cm
+15 0 0 -15 0 15 cm /Im0 Do
+Q
+q
+1 0 0 1 975 260 cm
+1 0 0 1 -5 -9 cm
+10 0 0 -18 0 18 cm /Im2 Do
+Q
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 953 260 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+-42.79 -3.59 Td
+(m_axis_0) Tj
+ET
+Q
+3 w
+0.000 0.000 0.000 RG
+720 250 m
+730 250 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 732 250 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_clk) Tj
+ET
+Q
+730 270 m
+730 268.343 728.657 267 727 267 c
+725.343 267 724 268.343 724 270 c
+724 271.657 725.343 273 727 273 c
+728.657 273 730 271.657 730 270 c
+S
+720 270 m
+724 270 l
+S
+q
+0.000 0.000 0.000 rg
+/GSa0 gs
+1 0 0 1 732 270 cm
+BT
+/F1 10 Tf
+1 0 0 -1 0 0 Tm
+0 -3.59 Td
+(ap_rst_n) Tj
+ET
+Q
+1 w
+0.255 0.380 0.624 RG
+737 202 m
+963 202 l
+S
+970 209 m
+% [Remainder of the block-design drawing stream omitted: rounded-box outlines, port glyphs and
+% text labels for odma0 (StreamingDataflowPartition_2_v1_0), rst_zynq_ps_100M (Processor System
+% Reset), smartconnect_0 (AXI SmartConnect) and zynq_ps (ZYNQ7 Processing System), together with
+% the AXI, clock and reset nets wired between the blocks.]
+endstream
+endobj
+3 0 obj
+<<
+  /Type    /Pages
+  /Kids
+  [
+  10 0 R
+  ]
+  /Count   1
+  /ProcSet [ /PDF /Text /ImageB /ImageC ]
+>>
+endobj
+6 0 obj
+<<
+  /GS << /Type /ExtGState
+         /LC    0
+         /LJ    0
+         /ML    4.0
+         /ca    1.0
+         /CA    1.0
+         /AIS   false
+         /SMask /None
+  >>
+  /GSa0 << /Type /ExtGState /ca 1 >>
+  /GSA0 << /Type /ExtGState /CA 1 >>
+>>
+endobj
+26 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1 top_StreamingDataflowPartition_1_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 344 288 404 326]
+  /Parent 25 0 R
+  /Next   27 0 R
+>>
+endobj
+27 0 obj
+<<
+  /Title  (axi_interconnect_0 axi_interconnect_0_imp)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 153 297 231 370]
+  /Parent 25 0 R
+  /Prev   26 0 R
+  /Next   28 0 R
+>>
+endobj
+28 0 obj
+<<
+  /Title  (idma0 top_idma0_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 246 288 323 326]
+  /Parent 25 0 R
+  /Prev   27 0 R
+  /Next   29 0 R
+>>
+endobj
+29 0 obj
+<<
+  /Title  (odma0 top_odma0_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 419 282 496 326]
+  /Parent 25 0 R
+  /Prev   28 0 R
+  /Next   30 0 R
+>>
+endobj
+30 0 obj
+<<
+  /Title  (rst_zynq_ps_100M top_rst_zynq_ps_100M_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 43 324 139 373]
+  /Parent 25 0 R
+  /Prev   29 0 R
+  /Next   31 0 R
+>>
+endobj
+31 0 obj
+<<
+  /Title  (smartconnect_0 top_smartconnect_0_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 511 246 583 290]
+  /Parent 25 0 R
+  /Prev   30 0 R
+  /Next   32 0 R
+>>
+endobj
+32 0 obj
+<<
+  /Title  (zynq_ps top_zynq_ps_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 595 238 714 293]
+  /Parent 25 0 R
+  /Prev   31 0 R
+>>
+endobj
+25 0 obj
+<<
+  /Title  (instances)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 24 0 R
+  /First  26 0 R
+  /Last   32 0 R
+  /Count  7
+  /Next   33 0 R
+>>
+endobj
+34 0 obj
+<<
+  /Title  (DDR output)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 728 277 744 283]
+  /Parent 33 0 R
+  /Next   35 0 R
+>>
+endobj
+35 0 obj
+<<
+  /Title  (FIXED_IO output)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 728 271 752 277]
+  /Parent 33 0 R
+  /Prev   34 0 R
+>>
+endobj
+33 0 obj
+<<
+  /Title  (ports)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 24 0 R
+  /First  34 0 R
+  /Last   35 0 R
+  /Count  2
+  /Prev   25 0 R
+  /Next   36 0 R
+>>
+endobj
+36 0 obj
+<<
+  /Title  (portBuses)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 24 0 R
+  /First  0 0 R
+  /Last   0 0 R
+  /Count  0
+  /Prev   33 0 R
+  /Next   37 0 R
+>>
+endobj
+38 0 obj
+<<
+  /Title  (StreamingDataflowPartition_1_m_axis_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 404 307 419 307]
+  /Parent 37 0 R
+  /Next   39 0 R
+>>
+endobj
+39 0 obj
+<<
+  /Title  (axi_interconnect_0_M00_AXI)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 230 312 247 338]
+  /Parent 37 0 R
+  /Prev   38 0 R
+  /Next   40 0 R
+>>
+endobj
+40 0 obj
+<<
+  /Title  (axi_interconnect_0_M01_AXI)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 230 312 420 332]
+  /Parent 37 0 R
+  /Prev   39 0 R
+  /Next   41 0 R
+>>
+endobj
+41 0 obj
+<<
+  /Title  (idma0_m_axi_gmem0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 322 276 512 311]
+  /Parent 37 0 R
+  /Prev   40 0 R
+  /Next   42 0 R
+>>
+endobj
+42 0 obj
+<<
+  /Title  (idma0_m_axis_0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 322 303 345 314]
+  /Parent 37 0 R
+  /Prev   41 0 R
+  /Next   43 0 R
+>>
+endobj
+43 0 obj
+<<
+  /Title  (odma0_m_axi_gmem0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 495 270 512 305]
+  /Parent 37 0 R
+  /Prev   42 0 R
+  /Next   44 0 R
+>>
+endobj
+44 0 obj
+<<
+  /Title  (rst_zynq_ps_100M_peripheral_aresetn)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 139 258 512 347]
+  /Parent 37 0 R
+  /Prev   43 0 R
+  /Next   45 0 R
+>>
+endobj
+45 0 obj
+<<
+  /Title  (smartconnect_0_M00_AXI)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 583 268 595 268]
+  /Parent 37 0 R
+  /Prev   44 0 R
+  /Next   46 0 R
+>>
+endobj
+46 0 obj
+<<
+  /Title  (zynq_ps_DDR)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 714 280 729 280]
+  /Parent 37 0 R
+  /Prev   45 0 R
+  /Next   47 0 R
+>>
+endobj
+47 0 obj
+<<
+  /Title  (zynq_ps_FCLK_CLK0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 37 232 720 361]
+  /Parent 37 0 R
+  /Prev   46 0 R
+  /Next   48 0 R
+>>
+endobj
+48 0 obj
+<<
+  /Title  (zynq_ps_FCLK_RESET0_N)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 34 250 723 379]
+  /Parent 37 0 R
+  /Prev   47 0 R
+  /Next   49 0 R
+>>
+endobj
+49 0 obj
+<<
+  /Title  (zynq_ps_FIXED_IO)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 714 274 729 274]
+  /Parent 37 0 R
+  /Prev   48 0 R
+  /Next   50 0 R
+>>
+endobj
+50 0 obj
+<<
+  /Title  (zynq_ps_M_AXI_GP0)
+  /C      [0.0 0.0 0.4]
+  /Dest   [10 0 R /FitR 143 261 721 377]
+  /Parent 37 0 R
+  /Prev   49 0 R
+>>
+endobj
+37 0 obj
+<<
+  /Title  (nets)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 24 0 R
+  /First  38 0 R
+  /Last   50 0 R
+  /Count  13
+  /Prev   36 0 R
+  /Next   51 0 R
+>>
+endobj
+51 0 obj
+<<
+  /Title  (netBundles)
+  /C      [0.0 0.4 0.0]
+  /F      1
+  /Parent 24 0 R
+  /First  0 0 R
+  /Last   0 0 R
+  /Count  0
+  /Prev   37 0 R
+>>
+endobj
+24 0 obj
+<<
+  /Title  (Nlview page 1)
+  /C      [0.4 0.0 0.0]
+  /Dest   [10 0 R /Fit]
+  /Parent 7 0 R
+  /First  25 0 R
+  /Last   51 0 R
+  /Count  5
+>>
+endobj
+8 0 obj
+<<
+>>
+endobj
+9 0 obj
+<<
+  /Im0 12 0 R
+  /Im1 13 0 R
+  /Im2 14 0 R
+  /Im3 15 0 R
+  /Im4 16 0 R
+  /Im5 17 0 R
+  /Im6 18 0 R
+  /Im7 19 0 R
+  /Im8 20 0 R
+  /Im9 21 0 R
+  /Im10 22 0 R
+  /Im11 23 0 R
+>>
+endobj
+52 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 16
+  /Height 16
+  /Length 256
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+12 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 16
+  /Height 16
+  /SMask 52 0 R
+  /Length 768
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+53 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+13 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 53 0 R
+  /Length 540
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+54 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+14 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 54 0 R
+  /Length 540
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+55 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 48
+  /Height 60
+  /Length 2880
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+15 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 48
+  /Height 60
+  /SMask 55 0 R
+  /Length 8640
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+56 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 44
+  /Height 44
+  /Length 1936
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+16 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 44
+  /Height 44
+  /SMask 56 0 R
+  /Length 5808
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+57 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+17 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 57 0 R
+  /Length 540
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+58 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+18 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 58 0 R
+  /Length 540
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+59 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+19 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 59 0 R
+  /Length 540
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+60 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 48
+  /Height 60
+  /Length 2880
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+20 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 48
+  /Height 60
+  /SMask 60 0 R
+  /Length 8640
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+61 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 100
+  /Height 31
+  /Length 3100
+>>
+stream
+% [binary image data omitted]
+endstream
+endobj
+21 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 100
+  /Height 31
+  /SMask 61 0 R
+  /Length 9300
+>>
+stream
+<binary image stream data omitted>endstream
+endobj
+62 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+<binary image stream data omitted>endstream
+endobj
+22 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 62 0 R
+  /Length 540
+>>
+stream
+<binary image stream data omitted>endstream
+endobj
+63 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceGray
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /Length 180
+>>
+stream
+<binary image stream data omitted>endstream
+endobj
+23 0 obj
+<<
+  /Type /XObject
+  /Subtype /Image
+  /ColorSpace /DeviceRGB
+  /BitsPerComponent 8
+  /Width 10
+  /Height 18
+  /SMask 63 0 R
+  /Length 540
+>>
+stream
+<binary image stream data omitted>endstream
+endobj
+7 0 obj
+<<
+  /Type  /Outline
+  /First 24 0 R
+  /Last  24 0 R
+  /Count 1
+>>
+endobj
+xref
+0 64
+0000000000 65535 f 
+0000000009 00000 n 
+0000000213 00000 n 
+0000023506 00000 n 
+0000000363 00000 n 
+0000000490 00000 n 
+0000023625 00000 n 
+0000082154 00000 n 
+0000028640 00000 n 
+0000028661 00000 n 
+0000000700 00000 n 
+0000000843 00000 n 
+0000029269 00000 n 
+0000030554 00000 n 
+0000031611 00000 n 
+0000035369 00000 n 
+0000046284 00000 n 
+0000052610 00000 n 
+0000053667 00000 n 
+0000054724 00000 n 
+0000058482 00000 n 
+0000070562 00000 n 
+0000080381 00000 n 
+0000081438 00000 n 
+0000028482 00000 n 
+0000025146 00000 n 
+0000023891 00000 n 
+0000024088 00000 n 
+0000024280 00000 n 
+0000024448 00000 n 
+0000024616 00000 n 
+0000024805 00000 n 
+0000024991 00000 n 
+0000025599 00000 n 
+0000025306 00000 n 
+0000025450 00000 n 
+0000025772 00000 n 
+0000028150 00000 n 
+0000025947 00000 n 
+0000026118 00000 n 
+0000026295 00000 n 
+0000026472 00000 n 
+0000026640 00000 n 
+0000026805 00000 n 
+0000026973 00000 n 
+0000027159 00000 n 
+0000027332 00000 n 
+0000027494 00000 n 
+0000027661 00000 n 
+0000027832 00000 n 
+0000027999 00000 n 
+0000028323 00000 n 
+0000028852 00000 n 
+0000030213 00000 n 
+0000031270 00000 n 
+0000032327 00000 n 
+0000044186 00000 n 
+0000052269 00000 n 
+0000053326 00000 n 
+0000054383 00000 n 
+0000055440 00000 n 
+0000067299 00000 n 
+0000080040 00000 n 
+0000081097 00000 n 
+trailer
+<<
+  /Size 64
+  /Info 1 0 R
+  /Root 2 0 R
+>>
+startxref
+82236
+%%EOF
diff --git a/requirements.txt b/requirements.txt
index 4eac22f03165ba934cf9a9517a14ed4cd8a4c7c6..ba7bc716b741911820e67f1455aeca4c05e6e005 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,11 +1,12 @@
-bitstring
-docrep
-future
+bitstring==3.1.7
+docrep==0.2.7
+future==0.18.2
+gspread==3.6.0
 numpy==1.18.0
-onnx==1.5.0
+onnx==1.6.0
 onnxruntime==1.2.0
-pre-commit
-pyverilator
-scipy
-sphinx
-wget
+pre-commit==2.6.0
+scipy==1.5.2
+toposort==1.5
+vcdvcd==1.0.5
+wget==3.2
diff --git a/run-docker.sh b/run-docker.sh
index b7f844d314c5fb67e11e0933f42b3edfa4d96036..219e5c258f2e4d8b4c95d1c0a84cd1a636510e24 100755
--- a/run-docker.sh
+++ b/run-docker.sh
@@ -27,13 +27,36 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+NC='\033[0m' # No Color
+
+# green echo
+gecho () {
+  echo -e "${GREEN}$1${NC}"
+}
+
+# red echo
+recho () {
+  echo -e "${RED}$1${NC}"
+}
+
 if [ -z "$VIVADO_PATH" ];then
-        echo "For correct implementation please set an environment variable VIVADO_PATH that contains the path to your vivado installation directory"
-        exit 1
+        recho "Please set the VIVADO_PATH that contains the path to your Vivado installation directory."
+        recho "FINN functionality depending on Vivado or Vivado HLS will not be available."
 fi
 
 if [ -z "$PYNQ_IP" ];then
-        echo "Please set the PYNQ_IP env.var. to enable PYNQ deployment tests."
+        recho "Please set the PYNQ_IP env.var. to enable PYNQ deployment tests."
+fi
+
+if [ -z "$VITIS_PATH" ];then
+        recho "Please set the VITIS_PATH that contains the path to your Vitis installation directory."
+        recho "FINN functionality depending on Vitis will not be available."
+else
+    if [ -z "$PLATFORM_REPO_PATHS" ];then
+            recho "Please set PLATFORM_REPO_PATHS pointing to Vitis platform files (DSAs)."
+    fi
 fi
 
 DOCKER_GID=$(id -g)
@@ -51,6 +74,11 @@ DOCKER_INST_NAME="finn_dev_${DOCKER_UNAME}"
 # ensure Docker tag and inst. name are all lowercase
 DOCKER_TAG=$(echo "$DOCKER_TAG" | tr '[:upper:]' '[:lower:]')
 DOCKER_INST_NAME=$(echo "$DOCKER_INST_NAME" | tr '[:upper:]' '[:lower:]')
+# Absolute path to this script, e.g. /home/user/bin/foo.sh
+SCRIPT=$(readlink -f "$0")
+# Absolute path this script is in, thus /home/user/bin
+SCRIPTPATH=$(dirname "$SCRIPT")
+
 # the settings below will be taken from environment variables if available,
 # otherwise the defaults below will be used
 : ${JUPYTER_PORT=8888}
@@ -60,11 +88,13 @@ DOCKER_INST_NAME=$(echo "$DOCKER_INST_NAME" | tr '[:upper:]' '[:lower:]')
 : ${PYNQ_BOARD="Pynq-Z1"}
 : ${PYNQ_TARGET_DIR="/home/xilinx/$DOCKER_INST_NAME"}
 : ${NUM_DEFAULT_WORKERS=1}
-
-# Absolute path to this script, e.g. /home/user/bin/foo.sh
-SCRIPT=$(readlink -f "$0")
-# Absolute path this script is in, thus /home/user/bin
-SCRIPTPATH=$(dirname "$SCRIPT")
+: ${FINN_SSH_KEY_DIR="$SCRIPTPATH/ssh_keys"}
+: ${ALVEO_USERNAME="alveo_user"}
+: ${ALVEO_PASSWORD=""}
+: ${ALVEO_BOARD="U250"}
+: ${ALVEO_TARGET_DIR="/tmp"}
+: ${XILINX_XRT="/opt/xilinx/xrt"}
+: ${PLATFORM_REPO_PATHS="/opt/xilinx/platforms"}
 
 BUILD_LOCAL=/tmp/$DOCKER_INST_NAME
 VIVADO_HLS_LOCAL=$VIVADO_PATH
@@ -73,24 +103,32 @@ VIVADO_IP_CACHE=$BUILD_LOCAL/vivado_ip_cache
 # ensure build dir exists locally
 mkdir -p $BUILD_LOCAL
 mkdir -p $VIVADO_IP_CACHE
+mkdir -p $FINN_SSH_KEY_DIR
+
+gecho "Instance is named as $DOCKER_INST_NAME"
+gecho "Mounting $BUILD_LOCAL into $BUILD_LOCAL"
+gecho "Mounting $VIVADO_PATH into $VIVADO_PATH"
+gecho "Mounting $VITIS_PATH into $VITIS_PATH"
+gecho "Port-forwarding for Jupyter $JUPYTER_PORT:$JUPYTER_PORT"
+gecho "Port-forwarding for Netron $NETRON_PORT:$NETRON_PORT"
+gecho "Vivado IP cache dir is at $VIVADO_IP_CACHE"
+gecho "Using default PYNQ board $PYNQ_BOARD"
 
-echo "Instance is named as $DOCKER_INST_NAME"
-echo "Mounting $BUILD_LOCAL into $BUILD_LOCAL"
-echo "Mounting $VIVADO_PATH into $VIVADO_PATH"
-echo "Port-forwarding for Jupyter $JUPYTER_PORT:$JUPYTER_PORT"
-echo "Port-forwarding for Netron $NETRON_PORT:$NETRON_PORT"
-echo "Vivado IP cache dir is at $VIVADO_IP_CACHE"
-echo "Using default PYNQ board $PYNQ_BOARD"
+DOCKER_INTERACTIVE=""
 
 if [ "$1" = "test" ]; then
-        echo "Running test suite"
+        gecho "Running test suite (all tests)"
         DOCKER_CMD="python setup.py test"
+elif [ "$1" = "quicktest" ]; then
+        gecho "Running test suite (non-Vivado, non-slow tests)"
+        DOCKER_CMD="quicktest.sh"
 elif [ "$1" = "notebook" ]; then
-        echo "Running Jupyter notebook server"
+        gecho "Running Jupyter notebook server"
         DOCKER_CMD="jupyter notebook --ip=0.0.0.0 --port $JUPYTER_PORT notebooks"
 else
-        echo "Running container only"
+        gecho "Running container only"
         DOCKER_CMD="bash"
+        DOCKER_INTERACTIVE="-it"
 fi
 
 # Build the FINN Docker image
@@ -106,23 +144,49 @@ docker build -f docker/Dockerfile.finn_dev --tag=$DOCKER_TAG \
 # Launch container with current directory mounted
 # important to pass the --init flag here for correct Vivado operation, see:
 # https://stackoverflow.com/questions/55733058/vivado-synthesis-hangs-in-docker-container-spawned-by-jenkins
-docker run -t --rm --name $DOCKER_INST_NAME -it --init \
---hostname $DOCKER_INST_NAME \
--e "XILINX_VIVADO=$VIVADO_PATH" \
--e "SHELL=/bin/bash" \
--v $SCRIPTPATH:/workspace/finn \
--v $BUILD_LOCAL:$BUILD_LOCAL \
--v $VIVADO_PATH:$VIVADO_PATH \
--e VIVADO_PATH=$VIVADO_PATH \
--e FINN_INST_NAME=$DOCKER_INST_NAME \
--e FINN_ROOT="/workspace/finn" \
--e VIVADO_IP_CACHE="$VIVADO_IP_CACHE" \
--e PYNQ_BOARD=$PYNQ_BOARD \
--e PYNQ_IP=$PYNQ_IP \
--e PYNQ_USERNAME=$PYNQ_USERNAME \
--e PYNQ_PASSWORD=$PYNQ_PASSWORD \
--e PYNQ_TARGET_DIR=$PYNQ_TARGET_DIR \
--e NUM_DEFAULT_WORKERS=$NUM_DEFAULT_WORKERS \
--p $JUPYTER_PORT:$JUPYTER_PORT \
--p $NETRON_PORT:$NETRON_PORT \
-$DOCKER_TAG $DOCKER_CMD
+DOCKER_EXEC="docker run -t --rm --name $DOCKER_INST_NAME $DOCKER_INTERACTIVE --init "
+DOCKER_EXEC+="--hostname $DOCKER_INST_NAME "
+DOCKER_EXEC+="-e SHELL=/bin/bash "
+DOCKER_EXEC+="-v $SCRIPTPATH:/workspace/finn "
+DOCKER_EXEC+="-v $BUILD_LOCAL:$BUILD_LOCAL "
+DOCKER_EXEC+="-v $FINN_SSH_KEY_DIR:/home/$DOCKER_UNAME/.ssh "
+DOCKER_EXEC+="-e FINN_INST_NAME=$DOCKER_INST_NAME "
+DOCKER_EXEC+="-e FINN_ROOT="/workspace/finn" "
+DOCKER_EXEC+="-e VIVADO_IP_CACHE=$VIVADO_IP_CACHE "
+DOCKER_EXEC+="-e PYNQ_BOARD=$PYNQ_BOARD "
+DOCKER_EXEC+="-e PYNQ_IP=$PYNQ_IP "
+DOCKER_EXEC+="-e PYNQ_USERNAME=$PYNQ_USERNAME "
+DOCKER_EXEC+="-e PYNQ_PASSWORD=$PYNQ_PASSWORD "
+DOCKER_EXEC+="-e PYNQ_TARGET_DIR=$PYNQ_TARGET_DIR "
+DOCKER_EXEC+="-e NUM_DEFAULT_WORKERS=$NUM_DEFAULT_WORKERS "
+DOCKER_EXEC+="-p $JUPYTER_PORT:$JUPYTER_PORT "
+DOCKER_EXEC+="-p $NETRON_PORT:$NETRON_PORT "
+if [ ! -z "$VIVADO_PATH" ];then
+  DOCKER_EXEC+="-e "XILINX_VIVADO=$VIVADO_PATH" "
+  DOCKER_EXEC+="-v $VIVADO_PATH:$VIVADO_PATH "
+  DOCKER_EXEC+="-e VIVADO_PATH=$VIVADO_PATH "
+fi
+if [ ! -z "$VITIS_PATH" ];then
+  if [ -z "$PLATFORM_REPO_PATHS" ];then
+          recho "PLATFORM_REPO_PATHS must be set for Vitis/Alveo flows"
+          exit -1
+  fi
+  if [ -z "$XILINX_XRT" ];then
+          recho "XILINX_XRT must be set for Vitis/Alveo flows"
+          exit -1
+  fi
+  DOCKER_EXEC+="-v $VITIS_PATH:$VITIS_PATH "
+  DOCKER_EXEC+="-v $PLATFORM_REPO_PATHS:$PLATFORM_REPO_PATHS "
+  DOCKER_EXEC+="-v $XILINX_XRT:$XILINX_XRT "
+  DOCKER_EXEC+="-e VITIS_PATH=$VITIS_PATH "
+  DOCKER_EXEC+="-e PLATFORM_REPO_PATHS=$PLATFORM_REPO_PATHS "
+  DOCKER_EXEC+="-e XILINX_XRT=$XILINX_XRT "
+  DOCKER_EXEC+="-e ALVEO_IP=$ALVEO_IP "
+  DOCKER_EXEC+="-e ALVEO_USERNAME=$ALVEO_USERNAME "
+  DOCKER_EXEC+="-e ALVEO_PASSWORD=$ALVEO_PASSWORD "
+  DOCKER_EXEC+="-e ALVEO_BOARD=$ALVEO_BOARD "
+  DOCKER_EXEC+="-e ALVEO_TARGET_DIR=$ALVEO_TARGET_DIR "
+fi
+DOCKER_EXEC+="$DOCKER_TAG $DOCKER_CMD"
+
+$DOCKER_EXEC
diff --git a/setup.cfg b/setup.cfg
index 5974cda20e37449a879f7528516895fb7cea4264..7729d0949ee133e06242905afab31708e79ebf04 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -101,6 +101,10 @@ extras = True
 # in order to write a coverage file that can be read by Jenkins.
 addopts =
     --verbose
+markers =
+    slow: marks tests as slow (deselect with '-m "not slow"')
+    vivado: marks tests that require Vivado or Vivado HLS
+    vitis: marks tests that require Vitis
 norecursedirs =
     dist
     build
diff --git a/src/finn/analysis/fpgadataflow/dataflow_performance.py b/src/finn/analysis/fpgadataflow/dataflow_performance.py
new file mode 100644
index 0000000000000000000000000000000000000000..e678630ae97318af47dd432a7c68442a6642b65f
--- /dev/null
+++ b/src/finn/analysis/fpgadataflow/dataflow_performance.py
@@ -0,0 +1,78 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from finn.custom_op.registry import getCustomOp
+from finn.util.fpgadataflow import is_fpgadataflow_node
+
+
+def dataflow_performance(model):
+    """Extract key performance indicators from given model with dataflow nodes.
+    Note that the latency (critical path) analysis is very pessimistic: it
+    assumes no overlap between executions and simply sums the expected cycles
+    for each node along the critical path.
+
+    Preconditions:
+    - model consists of fpgadataflow nodes
+    - model has cycle estimates annotated (see AnnotateCycles transformation)
+    - nodes have unique names (see GiveUniqueNodeNames)
+
+    Returns:
+    - max_cycles : number of cycles for slowest node
+    - max_cycles_node_name : name of slowest node
+    - critical_path_cycles : pessimistic expected latency from input to output
+    """
+    latency_at_node_output = {}
+    max_cycles = 0
+    max_node_name = ""
+
+    for node in model.graph.node:
+        if is_fpgadataflow_node(node) is True:
+            inst = getCustomOp(node)
+            node_cycles = inst.get_nodeattr("cycles_estimate")
+            if node_cycles > max_cycles:
+                max_cycles = node_cycles
+                max_node_name = node.name
+            if node.name not in latency_at_node_output:
+                # calculate based on input(s)
+                predecessors = model.find_direct_predecessors(node)
+                if predecessors is None:
+                    # no predecessors, node is first node
+                    max_pred_latency = 0
+                else:
+                    # find max of any of predecessors
+                    pred_latencies = map(
+                        lambda x: latency_at_node_output[x.name], predecessors
+                    )
+                    max_pred_latency = max(pred_latencies)
+                latency_at_node_output[node.name] = node_cycles + max_pred_latency
+    critical_path_cycles = max(latency_at_node_output.values())
+    return {
+        "critical_path_cycles": critical_path_cycles,
+        "max_cycles": max_cycles,
+        "max_cycles_node_name": max_node_name,
+    }
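For orientation, a minimal usage sketch of this pass (the ONNX file name is a placeholder, and the model is assumed to already satisfy the preconditions above, i.e. annotated cycle estimates and unique node names):

    from finn.core.modelwrapper import ModelWrapper
    from finn.analysis.fpgadataflow.dataflow_performance import dataflow_performance

    # load a dataflow-only model that already carries cycle estimates
    model = ModelWrapper("dataflow_partition.onnx")  # placeholder path
    perf = model.analysis(dataflow_performance)
    print("slowest node:", perf["max_cycles_node_name"], perf["max_cycles"])
    print("critical path:", perf["critical_path_cycles"], "cycles")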
diff --git a/src/finn/analysis/fpgadataflow/exp_cycles_per_layer.py b/src/finn/analysis/fpgadataflow/exp_cycles_per_layer.py
new file mode 100644
index 0000000000000000000000000000000000000000..201333aebdb3fc1d15464389e37326dcaf6848e0
--- /dev/null
+++ b/src/finn/analysis/fpgadataflow/exp_cycles_per_layer.py
@@ -0,0 +1,48 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import finn.custom_op.registry as registry
+from finn.util.fpgadataflow import is_fpgadataflow_node
+
+
+def exp_cycles_per_layer(model):
+    """Estimates the number of cycles per sample for dataflow layers in the given model.
+    Ensure that all nodes have unique names (by calling the GiveUniqueNodeNames
+    transformation) prior to calling this analysis pass, so that all nodes are
+    visible in the results.
+
+    Returns {node name : cycle estimation}."""
+
+    cycle_dict = {}
+    for node in model.graph.node:
+        if is_fpgadataflow_node(node) is True:
+            op_type = node.op_type
+            inst = registry.custom_op[op_type](node)
+            cycle_dict[node.name] = inst.get_exp_cycles()
+
+    return cycle_dict
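A usage sketch, assuming model is a prepared ModelWrapper as in the sketch above and that GiveUniqueNodeNames can be imported from finn.transformation.general (its import path is not shown in this patch):

    from finn.transformation.general import GiveUniqueNodeNames  # assumed import path
    from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer

    model = model.transform(GiveUniqueNodeNames())  # make node names unique first
    cycles_per_node = model.analysis(exp_cycles_per_layer)
    for name, cycles in cycles_per_node.items():
        print(name, cycles)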
diff --git a/src/finn/analysis/fpgadataflow/hls_synth_res_estimation.py b/src/finn/analysis/fpgadataflow/hls_synth_res_estimation.py
index ad30282d93034f8d043a05a2172790349c31ec83..03b31b9c1ec51b45e17152d35d5824b6137ab4a2 100644
--- a/src/finn/analysis/fpgadataflow/hls_synth_res_estimation.py
+++ b/src/finn/analysis/fpgadataflow/hls_synth_res_estimation.py
@@ -35,6 +35,9 @@ from finn.util.fpgadataflow import is_fpgadataflow_node
 
 def hls_synth_res_estimation(model):
     """Extracts the FPGA resource results from the Vivado HLS synthesis estimates.
+    Ensure that all nodes have unique names (by calling the GiveUniqueNodeNames
+    transformation) prior to calling this analysis pass, so that all nodes are
+    visible in the results.
 
     Returns {node name : resources_dict}."""
 
diff --git a/src/finn/analysis/fpgadataflow/post_synth_res.py b/src/finn/analysis/fpgadataflow/post_synth_res.py
index 508c34aaed50f2935f4915cdcea29a3e92641b3c..79204c54cdb8233fd7b65968c25af819fce91959 100644
--- a/src/finn/analysis/fpgadataflow/post_synth_res.py
+++ b/src/finn/analysis/fpgadataflow/post_synth_res.py
@@ -30,15 +30,23 @@ import os
 import xml.etree.ElementTree as ET
 
 from finn.transformation.move_reshape import _is_fpgadataflow_node
+from finn.core.modelwrapper import ModelWrapper
+from finn.custom_op.registry import getCustomOp
 
 
-def post_synth_res(model):
+def post_synth_res(model, override_synth_report_filename=None):
     """Extracts the FPGA resource results from the Vivado synthesis.
+    Ensure that all nodes have unique names (by calling the GiveUniqueNodeNames
+    transformation) prior to calling this analysis pass, so that all nodes are
+    visible in the results.
 
     Returns {node name : resources_dict}."""
 
     res_dict = {}
-    synth_report_filename = model.get_metadata_prop("vivado_synth_rpt")
+    if override_synth_report_filename is not None:
+        synth_report_filename = override_synth_report_filename
+    else:
+        synth_report_filename = model.get_metadata_prop("vivado_synth_rpt")
     if os.path.isfile(synth_report_filename):
         tree = ET.parse(synth_report_filename)
         root = tree.getroot()
@@ -49,32 +57,54 @@ def post_synth_res(model):
     else:
         raise Exception("Please run synthesis first")
 
+    # TODO build these indices based on table headers instead of hardcoding
+    restype_to_ind_default = {
+        "LUT": 2,
+        "SRL": 5,
+        "FF": 6,
+        "BRAM_36K": 7,
+        "BRAM_18K": 8,
+        "DSP48": 9,
+    }
+    restype_to_ind_vitis = {
+        "LUT": 4,
+        "SRL": 7,
+        "FF": 8,
+        "BRAM_36K": 9,
+        "BRAM_18K": 10,
+        "URAM": 11,
+        "DSP48": 12,
+    }
+
+    if model.get_metadata_prop("platform") == "alveo":
+        restype_to_ind = restype_to_ind_vitis
+    else:
+        restype_to_ind = restype_to_ind_default
+
+    def get_instance_stats(inst_name):
+        row = root.findall(".//*[@contents='%s']/.." % inst_name)
+        if row != []:
+            node_dict = {}
+            row = row[0].getchildren()
+            for (restype, ind) in restype_to_ind.items():
+                node_dict[restype] = int(row[ind].attrib["contents"])
+            return node_dict
+        else:
+            return None
+
+    # global (top-level) stats, including shell etc.
+    top_dict = get_instance_stats("(top)")
+    if top_dict is not None:
+        res_dict["(top)"] = top_dict
+
     for node in model.graph.node:
-        if _is_fpgadataflow_node(node):
-            row = root.findall(".//*[@contents='%s']/.." % node.name)
-            if row != []:
-                node_dict = {}
-                row = row[0].getchildren()
-                """ Expected XML structure:
-<tablerow class="" suppressoutput="0" wordwrap="0">
-    <tableheader class="" contents="Instance" halign="3" width="-1"/>
-    <tableheader class="" contents="Module" halign="3" width="-1"/>
-    <tableheader class="" contents="Total LUTs" halign="3" width="-1"/>
-    <tableheader class="" contents="Logic LUTs" halign="3" width="-1"/>
-    <tableheader class="" contents="LUTRAMs" halign="3" width="-1"/>
-    <tableheader class="" contents="SRLs" halign="3" width="-1"/>
-    <tableheader class="" contents="FFs" halign="3" width="-1"/>
-    <tableheader class="" contents="RAMB36" halign="3" width="-1"/>
-    <tableheader class="" contents="RAMB18" halign="3" width="-1"/>
-    <tableheader class="" contents="DSP48 Blocks" halign="3" width="-1"/>
-</tablerow>
-                """
-                node_dict["LUT"] = int(row[2].attrib["contents"])
-                node_dict["SRL"] = int(row[5].attrib["contents"])
-                node_dict["FF"] = int(row[6].attrib["contents"])
-                node_dict["BRAM_36K"] = int(row[7].attrib["contents"])
-                node_dict["BRAM_18K"] = int(row[8].attrib["contents"])
-                node_dict["DSP48"] = int(row[9].attrib["contents"])
+        if node.op_type == "StreamingDataflowPartition":
+            sdp_model = ModelWrapper(getCustomOp(node).get_nodeattr("model"))
+            sdp_res_dict = post_synth_res(sdp_model, synth_report_filename)
+            res_dict.update(sdp_res_dict)
+        elif _is_fpgadataflow_node(node):
+            node_dict = get_instance_stats(node.name)
+            if node_dict is not None:
                 res_dict[node.name] = node_dict
 
     return res_dict
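As a usage sketch (the report file name is illustrative and model is a synthesized ModelWrapper): the pass either reads the report recorded in the vivado_synth_rpt metadata property, or can be pointed at an explicit report file, which is also how it recurses into StreamingDataflowPartition children above.

    from finn.analysis.fpgadataflow.post_synth_res import post_synth_res

    # default: parse the report referenced by the "vivado_synth_rpt" metadata prop
    res = model.analysis(post_synth_res)
    # or parse a specific Vivado utilization report (XML) directly
    res = post_synth_res(model, override_synth_report_filename="util_report.xml")
    print(res.get("(top)"))  # overall utilization including the shell, if present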
diff --git a/src/finn/analysis/fpgadataflow/res_estimation.py b/src/finn/analysis/fpgadataflow/res_estimation.py
index c190059eceb0cc111477c84f843f4a9f9bf2f393..e52557573dab072709da4452f4e2d477e99b98c9 100644
--- a/src/finn/analysis/fpgadataflow/res_estimation.py
+++ b/src/finn/analysis/fpgadataflow/res_estimation.py
@@ -32,6 +32,9 @@ from finn.util.fpgadataflow import is_fpgadataflow_node
 
 def res_estimation(model):
     """Estimates the resources needed for the given model.
+    Ensure that all nodes have unique names (by calling the GiveUniqueNodeNames
+    transformation) prior to calling this analysis pass, so that all nodes are
+    visible in the results.
 
     Returns {node name : resource estimation}."""
 
diff --git a/src/finn/analysis/topology.py b/src/finn/analysis/topology.py
index c825a221ec178ee89b4e3747c982e59a3005cadd..acdb8ed7fcf41fd041c3601b2ee4fe67b6dc5f19 100644
--- a/src/finn/analysis/topology.py
+++ b/src/finn/analysis/topology.py
@@ -79,3 +79,26 @@ def node_inputs_in_expected_order(model):
         if n.op_type != "Add":
             all_OK = all_OK and (model.get_initializer(n.input[1]) is not None)
     return {"node_inputs_in_expected_order": all_OK}
+
+
+def nodes_topologically_sorted(model):
+    """Verifies that graph.node is topologically sorted. This is required by the
+    ONNX specification.
+
+    Returns {"nodes_topologically_sorted": Bool}."""
+
+    # get successors of every node and check that
+    # successor index > current node index
+
+    all_OK = True
+    for n in model.graph.node:
+        successors = model.find_direct_successors(n)
+        if successors is not None:
+            for successor in successors:
+                # a successor appearing earlier in the list violates the ordering
+                index_n = model.get_node_index(n)
+                index_suc = model.get_node_index(successor)
+                if index_n > index_suc:
+                    all_OK = False
+
+    return {"nodes_topologically_sorted": all_OK}
diff --git a/src/finn/core/data_layout.py b/src/finn/core/data_layout.py
new file mode 100644
index 0000000000000000000000000000000000000000..3971d221527d3862346c06cf415831c27e5cba8b
--- /dev/null
+++ b/src/finn/core/data_layout.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# predefined lists of strings to have a canonical way of expressing data layout
+# annotations
+
+NHWC = ["N", "H", "W", "C"]
+NCHW = ["N", "C", "H", "W"]
+NC = ["N", "C"]
+UNKNOWN = []
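These constants are meant to be used with the new get_tensor_layout/set_tensor_layout helpers added to ModelWrapper later in this patch; a small sketch with an illustrative tensor name and an existing ModelWrapper model:

    import finn.core.data_layout as DataLayout

    model.set_tensor_layout("act_0", DataLayout.NHWC)  # "act_0" is illustrative
    assert model.get_tensor_layout("act_0") == ["N", "H", "W", "C"]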
diff --git a/src/finn/core/datatype.py b/src/finn/core/datatype.py
index 222d11a8872f9be757fd60fbfa5f8abea683311a..df895a1ad446d6b2cc3ebb24f1179944f4cfe9ab 100644
--- a/src/finn/core/datatype.py
+++ b/src/finn/core/datatype.py
@@ -50,17 +50,69 @@ class DataType(Enum):
     UINT2 = auto()
     UINT3 = auto()
     UINT4 = auto()
+    UINT5 = auto()
+    UINT6 = auto()
+    UINT7 = auto()
     UINT8 = auto()
+    UINT9 = auto()
+    UINT10 = auto()
+    UINT11 = auto()
+    UINT12 = auto()
+    UINT13 = auto()
+    UINT14 = auto()
+    UINT15 = auto()
     UINT16 = auto()
+    UINT17 = auto()
+    UINT18 = auto()
+    UINT19 = auto()
+    UINT20 = auto()
+    UINT21 = auto()
+    UINT22 = auto()
+    UINT23 = auto()
+    UINT24 = auto()
+    UINT25 = auto()
+    UINT26 = auto()
+    UINT27 = auto()
+    UINT28 = auto()
+    UINT29 = auto()
+    UINT30 = auto()
+    UINT31 = auto()
     UINT32 = auto()
+    UINT64 = auto()
     BIPOLAR = auto()
     TERNARY = auto()
     INT2 = auto()
     INT3 = auto()
     INT4 = auto()
+    INT5 = auto()
+    INT6 = auto()
+    INT7 = auto()
     INT8 = auto()
+    INT9 = auto()
+    INT10 = auto()
+    INT11 = auto()
+    INT12 = auto()
+    INT13 = auto()
+    INT14 = auto()
+    INT15 = auto()
     INT16 = auto()
+    INT17 = auto()
+    INT18 = auto()
+    INT19 = auto()
+    INT20 = auto()
+    INT21 = auto()
+    INT22 = auto()
+    INT23 = auto()
+    INT24 = auto()
+    INT25 = auto()
+    INT26 = auto()
+    INT27 = auto()
+    INT28 = auto()
+    INT29 = auto()
+    INT30 = auto()
+    INT31 = auto()
     INT32 = auto()
+    INT64 = auto()
     FLOAT32 = auto()
 
     def bitwidth(self):
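Assuming bitwidth() continues to derive the width from the enum name as it does for the existing types, the newly added arbitrary-precision types behave the same way, e.g.:

    from finn.core.datatype import DataType

    assert DataType.UINT17.bitwidth() == 17
    assert DataType.INT5.bitwidth() == 5
    assert DataType.INT64.bitwidth() == 64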
diff --git a/src/finn/core/modelwrapper.py b/src/finn/core/modelwrapper.py
index e99a6ef4cd40d6323d77354d3c9b4be341d7649c..42acc6fd277c9419920edd534e5660e85c7626b0 100644
--- a/src/finn/core/modelwrapper.py
+++ b/src/finn/core/modelwrapper.py
@@ -27,7 +27,7 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import copy
-
+import os
 import onnx
 import onnx.helper as oh
 import onnx.numpy_helper as np_helper
@@ -36,6 +36,12 @@ from onnx import TensorProto
 import finn.util.basic as util
 import finn.util.onnx as onnxutil
 from finn.core.datatype import DataType
+from finn.transformation.general import (
+    RemoveUnusedTensors,
+    RemoveStaticGraphInputs,
+    SortGraph,
+)
+from finn.transformation.double_to_single_float import DoubleToSingleFloat
 
 
 class ModelWrapper:
@@ -46,10 +52,12 @@ class ModelWrapper:
         """Creates a ModelWrapper instance.
         onnx_model_proto can be either a ModelProto instance, or a string
         with the path to a stored .onnx file on disk, or serialized bytes.
-        The make_deepcopy option controls whether a deep copy of the ModelProto
+
+        - make_deepcopy : controls whether a deep copy of the ModelProto
         is made internally.
         """
         if isinstance(onnx_model_proto, str):
+            assert os.path.isfile(onnx_model_proto)
             self._model_proto = onnx.load(onnx_model_proto)
         elif isinstance(onnx_model_proto, bytes):
             self._model_proto = onnx.load_from_string(onnx_model_proto)
@@ -87,20 +95,44 @@ class ModelWrapper:
         """Runs given anaylsis_fxn on this model and return resulting dict."""
         return analysis_fxn(self)
 
-    def transform(self, transformation, make_deepcopy=True):
+    def transform(
+        self, transformation, make_deepcopy=True, cleanup=True, fix_float64=True
+    ):
         """Applies given Transformation repeatedly until no more changes can be made
         and returns a transformed ModelWrapper instance.
 
-        If make_deepcopy is specified, operates on a new (deep)copy of model.
+        - make_deepcopy : operates on a new (deep)copy of model.
+        - fix_float64 : DoubleToSingleFloat correction before starting
+        - cleanup : execute cleanup transformations before returning
         """
         transformed_model = self
         if make_deepcopy:
             transformed_model = copy.deepcopy(self)
+        if fix_float64:
+            (transformed_model, model_was_changed) = DoubleToSingleFloat().apply(
+                transformed_model
+            )
         model_was_changed = True
         while model_was_changed:
             (transformed_model, model_was_changed) = transformation.apply(
                 transformed_model
             )
+        if cleanup:
+            transformed_model.cleanup()
+        return transformed_model
+
+    def cleanup(self):
+        "Run cleanup transformations on the model."
+        transformed_model = self
+        cleanup_transforms = [
+            RemoveUnusedTensors(),
+            RemoveStaticGraphInputs(),
+            SortGraph(),
+        ]
+        for trn in cleanup_transforms:
+            transformed_model = transformed_model.transform(
+                trn, cleanup=False, make_deepcopy=False
+            )
         return transformed_model
 
     def check_compatibility(self):
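To illustrate the new transform() defaults (DoubleToSingleFloat correction before, cleanup passes after), a sketch with a placeholder model path and one of the cleanup transformations applied explicitly:

    from finn.core.modelwrapper import ModelWrapper
    from finn.transformation.general import RemoveStaticGraphInputs

    model = ModelWrapper("model.onnx")  # placeholder path
    # default behaviour: float64 fix first, cleanup passes afterwards
    model = model.transform(RemoveStaticGraphInputs())
    # opt out of the extra passes when they are not wanted
    model = model.transform(RemoveStaticGraphInputs(), cleanup=False, fix_float64=False)
    # or run only the cleanup passes
    model = model.cleanup()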
@@ -137,11 +169,16 @@ class ModelWrapper:
         qnt_annotations = graph.quantization_annotation
         ret = util.get_by_name(qnt_annotations, tensor_name, "tensor_name")
         if ret is not None:
-            ret = util.get_by_name(
+            ret_dt = util.get_by_name(
                 ret.quant_parameter_tensor_names, "finn_datatype", "key"
             )
-            if ret is not None:
-                ret.value = datatype.name
+            if ret_dt is not None:
+                ret_dt.value = datatype.name
+            else:
+                dt = onnx.StringStringEntryProto()
+                dt.key = "finn_datatype"
+                dt.value = datatype.name
+                ret.quant_parameter_tensor_names.append(dt)
         else:
             qa = onnx.TensorAnnotation()
             dt = onnx.StringStringEntryProto()
@@ -254,11 +291,10 @@ class ModelWrapper:
 
     def find_producer(self, tensor_name):
         """Finds and returns the node that produces the tensor with given name."""
-        ret = None
         for x in self._model_proto.graph.node:
             if tensor_name in x.output:
-                ret = x
-        return ret
+                return x
+        return None
 
     def find_upstream(self, tensor_name, finder_fxn):
         """Follow the producer chain upstream, calling finder_fxn on each upstream
@@ -288,6 +324,62 @@ class ModelWrapper:
         except ValueError:
             return None
 
+    def find_consumers(self, tensor_name):
+        """Finds and returns a list of the nodes that consume tensor with
+        given name."""
+        consumers = []
+        for n in self._model_proto.graph.node:
+            for inp_tensor in n.input:
+                if inp_tensor == tensor_name:
+                    consumers.append(n)
+        if consumers != []:
+            return consumers
+        else:
+            return None
+
+    def find_direct_successors(self, node):
+        """Finds and returns a list of the nodes that are successors of
+        given node."""
+        successors = []
+        for outp_tensor in node.output:
+            tensor_consumer_list = self.find_consumers(outp_tensor)
+            if tensor_consumer_list is not None:
+                for consumer in tensor_consumer_list:
+                    successors.append(consumer)
+        if successors != []:
+            return successors
+        else:
+            return None
+
+    def find_direct_predecessors(self, node):
+        """Finds and returns a list of the nodes that are predecessors of
+        given node."""
+        predecessors = []
+        for inp_tensor in node.input:
+            producer = self.find_producer(inp_tensor)
+            if producer is not None:
+                predecessors.append(producer)
+        if predecessors != []:
+            return predecessors
+        else:
+            return None
+
+    def is_fork_node(self, node):
+        """Checks if the given node is a fork, that is, the node has multiple
+        direct successors"""
+        direct_successors = self.find_direct_successors(node)
+        is_fork = False if direct_successors is None else (len(direct_successors) > 1)
+        return is_fork
+
+    def is_join_node(self, node):
+        """Checks if the given node is a join, that is, the node has multiple
+        direct predecessors"""
+        direct_predecessors = self.find_direct_predecessors(node)
+        is_join = (
+            False if direct_predecessors is None else (len(direct_predecessors) > 1)
+        )
+        return is_join
+
     def get_all_tensor_names(self):
         """Returns a list of all (input, output and value_info) tensor names
         in the graph."""
@@ -383,3 +475,107 @@ class ModelWrapper:
     def get_non_finn_nodes(self):
         """Returns a list of nodes where domain != 'finn'."""
         return list(filter(lambda x: x.domain != "finn", self.graph.node))
+
+    def get_node_index(self, node):
+        """Returns current index of given node."""
+        n_ind = 0
+        try:
+            for n in self.graph.node:
+                if n == node:
+                    return n_ind
+                n_ind += 1
+        except ValueError:
+            return None
+
+    def get_tensor_layout(self, tensor_name):
+        """Returns the data layout annotation of tensor with given name.
+        The data layout is expressed as a list of strings with as many
+        elements as the number of dimensions in the tensor shape. Each
+        string annotates what is contained in that dimension. If there is no
+        data layout annotation, None will be returned.
+        Examples of data layout annotations:
+        ["N", "C"] is tensor[batch][channel]
+        ["N", "C", "H", "W"] is tensor[batch][channel][height][width]
+        ["N", "H", "W", "C"] is tensor[batch][height][width][channel]
+        """
+        graph = self._model_proto.graph
+        qnt_annotations = graph.quantization_annotation
+        ret = util.get_by_name(qnt_annotations, tensor_name, "tensor_name")
+        if ret is not None:
+            ret = util.get_by_name(
+                ret.quant_parameter_tensor_names, "tensor_layout", "key"
+            )
+            if ret is not None:
+                return eval(ret.value)
+        return None
+
+    def set_tensor_layout(self, tensor_name, data_layout):
+        """Sets the data layout annotation of tensor with given name. See
+        get_tensor_layout for examples."""
+        tensor_shape = self.get_tensor_shape(tensor_name)
+        assert type(data_layout) == list, "data_layout must be a list"
+        if tensor_shape is not None:
+            assert len(tensor_shape) == len(
+                data_layout
+            ), """Mismatch between number
+            of dimensions of tensor shape and data layout annotation."""
+        graph = self._model_proto.graph
+        qnt_annotations = graph.quantization_annotation
+        ret = util.get_by_name(qnt_annotations, tensor_name, "tensor_name")
+        if ret is not None:
+            ret_tl = util.get_by_name(
+                ret.quant_parameter_tensor_names, "tensor_layout", "key"
+            )
+            if ret_tl is not None:
+                ret_tl.value = str(data_layout)
+            else:
+                tl = onnx.StringStringEntryProto()
+                tl.key = "tensor_layout"
+                tl.value = str(data_layout)
+                ret.quant_parameter_tensor_names.append(tl)
+        else:
+            qa = onnx.TensorAnnotation()
+            dt = onnx.StringStringEntryProto()
+            dt.key = "tensor_layout"
+            dt.value = str(data_layout)
+            qa.tensor_name = tensor_name
+            qa.quant_parameter_tensor_names.append(dt)
+            qnt_annotations.append(qa)
+
+    def get_tensor_sparsity(self, tensor_name):
+        """Returns the sparsity of a given tensor as dictionary."""
+        graph = self._model_proto.graph
+        qnt_annotations = graph.quantization_annotation
+        ret = util.get_by_name(qnt_annotations, tensor_name, "tensor_name")
+        if ret is not None:
+            ret = util.get_by_name(
+                ret.quant_parameter_tensor_names, "tensor_sparsity", "key"
+            )
+            if ret is not None:
+                return eval(ret.value)
+        return None
+
+    def set_tensor_sparsity(self, tensor_name, sparsity_dict):
+        """Sets the sparsity annotation of a tensor with given name."""
+        graph = self._model_proto.graph
+        qnt_annotations = graph.quantization_annotation
+        ret = util.get_by_name(qnt_annotations, tensor_name, "tensor_name")
+        if ret is not None:
+            ret_ts = util.get_by_name(
+                ret.quant_parameter_tensor_names, "tensor_sparsity", "key"
+            )
+            if ret_ts is not None:
+                ret_ts.value = str(sparsity_dict)
+            else:
+                ts = onnx.StringStringEntryProto()
+                ts.key = "tensor_sparsity"
+                ts.value = str(sparsity_dict)
+                ret.quant_parameter_tensor_names.append(ts)
+        else:
+            qa = onnx.TensorAnnotation()
+            dt = onnx.StringStringEntryProto()
+            dt.key = "tensor_sparsity"
+            dt.value = str(sparsity_dict)
+            qa.tensor_name = tensor_name
+            qa.quant_parameter_tensor_names.append(dt)
+            qnt_annotations.append(qa)
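The sparsity annotation follows the same pattern as the layout annotation; a sketch with an illustrative tensor name and dictionary (the dictionary contents are not prescribed by this patch), again assuming an existing ModelWrapper model:

    model.set_tensor_sparsity("weights_0", {"example_density": 0.25})
    print(model.get_tensor_sparsity("weights_0"))  # {'example_density': 0.25}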
diff --git a/src/finn/core/onnx_exec.py b/src/finn/core/onnx_exec.py
index 172ba25b223fd087df134add460a42d0a9935e0e..85b52c0f33baac609b4dad4df59f8442f737ffc2 100644
--- a/src/finn/core/onnx_exec.py
+++ b/src/finn/core/onnx_exec.py
@@ -38,9 +38,11 @@ from finn.core.modelwrapper import ModelWrapper
 from finn.core.remote_exec import remote_exec
 from finn.core.rtlsim_exec import rtlsim_exec
 from finn.custom_op.registry import getCustomOp
+import finn.analysis.topology as ta
+from finn.util.basic import sanitize_quant_values, get_sanitize_quant_tensors
 
 
-def execute_node(node, context, graph):
+def execute_node(node, context, graph, return_full_exec_context=False):
     """Executes a single node by using onnxruntime, with custom function or
     if dataflow partition by using remote execution or rtlsim.
 
@@ -49,8 +51,29 @@ def execute_node(node, context, graph):
     if node.op_type == "StreamingDataflowPartition":
         sdp_node = getCustomOp(node)
         model = ModelWrapper(sdp_node.get_nodeattr("model"))
-        ret = execute_onnx(model, context, True)
-        context.update(ret)
+        inp_ctx = dict(filter(lambda x: x[0] in node.input, context.items()))
+        # input may have been renamed in partition
+        assert len(inp_ctx) == 1
+        old_iname = node.input[0]
+        new_iname = model.graph.input[0].name
+        if old_iname != new_iname:
+            inp_ctx[new_iname] = inp_ctx[old_iname]
+            del inp_ctx[old_iname]
+        ret = execute_onnx(model, inp_ctx, return_full_exec_context)
+        # if the model was in ip-stitched rtlsim mode, it may have gained an
+        # annotation with the number of elapsed cycles, so save it again
+        if model.get_metadata_prop("exec_mode") == "rtlsim":
+            model.save(sdp_node.get_nodeattr("model"))
+        # output may have been renamed in partition
+        assert len(model.graph.output) == 1
+        node_oname = node.output[0]
+        model_oname = model.graph.output[0].name
+        context[node_oname] = ret[model_oname]
+        # prefix and insert exec context entries
+        if return_full_exec_context:
+            for tname in ret.keys():
+                if tname != model_oname:
+                    context[node.name + "_" + tname] = ret[tname]
     else:
         if node.domain == "finn":
 
@@ -101,15 +124,14 @@ def execute_node(node, context, graph):
                     raise Exception(
                         """Output shapes disagree after node execution:
                         found %s vs expected %s"""
-                        % (
-                            str(output_list[list_ind].shape.shape),
-                            str(context[outp].shape),
-                        )
+                        % (str(output_list[list_ind].shape), str(context[outp].shape))
                     )
                 context[outp] = output_list[list_ind]
 
 
-def execute_onnx(model, input_dict, return_full_exec_context=False):
+def execute_onnx(
+    model, input_dict, return_full_exec_context=False, start_node=None, end_node=None
+):
     """Executes given ONNX ModelWrapper with given named inputs.
 
     If return_full_exec_context is False, a dict of named outputs is returned
@@ -117,10 +139,20 @@ def execute_onnx(model, input_dict, return_full_exec_context=False):
 
     If return_full_exec_context is True, the full set of tensors used by
     the execution (including inputs, weights, activations and final outputs)
-    will be returned as a dict."""
+    will be returned as a dict.
+
+    When start_node and end_node are set to None, the whole graph is executed.
+    If they are set to particular ONNX nodes, only the subgraph between (and
+    including) those nodes is executed.
+    """
 
     if not model.check_all_tensor_shapes_specified():
         raise Exception("Found unspecified tensor shapes, try infer_shapes")
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert (
+        ret["nodes_topologically_sorted"] is True
+    ), """Nodes must be
+    topologically sorted."""
 
     graph = model.graph
     # first, we need to make sure that every variable required by the graph has
@@ -155,8 +187,28 @@ def execute_onnx(model, input_dict, return_full_exec_context=False):
         # execute the model node by node
         # we can simply walk down the list since the ONNX spec guarantees that it is
         # topologically sorted
-        for node in graph.node:
-            execute_node(node, execution_context, graph)
+        subgraph = []
+        if start_node is None:
+            start_node = model.graph.node[0]
+        if end_node is None:
+            end_node = model.graph.node[-1]
+        # select the nodes between specified start/end nodes
+        start_ind = model.get_node_index(start_node)
+        end_ind = model.get_node_index(end_node) + 1
+        assert end_ind >= start_ind, "Start/end nodes must define valid subgraph"
+        subgraph = graph.node[start_ind:end_ind]
+        for node in subgraph:
+            if get_sanitize_quant_tensors() != 0:
+                # round input values to match quantization annotation
+                execution_context = sanitize_quant_values(
+                    model, node.input, execution_context
+                )
+            execute_node(node, execution_context, graph, return_full_exec_context)
+            if get_sanitize_quant_tensors() != 0:
+                # round output values to quantization annotation
+                execution_context = sanitize_quant_values(
+                    model, node.output, execution_context
+                )
     elif model_exec_mode == "remote_pynq":
         # use remote exec metadata built into model to execute on a remote PYNQ
         remote_exec(model, execution_context)
diff --git a/src/finn/core/remote_exec.py b/src/finn/core/remote_exec.py
index 335dfec04e4abee41f914c5d912ce291a0d31a91..2e139065ec0eff8cdbdb402f80113b039deed4da 100644
--- a/src/finn/core/remote_exec.py
+++ b/src/finn/core/remote_exec.py
@@ -28,7 +28,7 @@
 
 import os
 import subprocess
-
+import warnings
 import numpy as np
 
 
@@ -43,15 +43,35 @@ def remote_exec(model, execution_context):
     pynq_password = model.get_metadata_prop("pynq_password")
     pynq_target_dir = model.get_metadata_prop("pynq_target_dir")
     deployment_dir = model.get_metadata_prop("pynq_deploy_dir")
+    platform = model.get_metadata_prop("platform")
+    assert platform in ["alveo", "zynq-iodma"]
+    bitfile = model.get_metadata_prop("bitfile")
+    bitfile = os.path.basename(bitfile)
+    if pynq_password == "":
+        if "zynq" in platform:
+            raise Exception("PYNQ board remote exec needs password for sudo")
+        else:
+            local_prefix = ""  # assume we are using an ssh key
+            warnings.warn("Empty password, make sure you've set up an ssh key")
+    else:
+        local_prefix = "sshpass -p %s " % pynq_password
+
+    if platform == "alveo":
+        # Alveo can run without sudo
+        remote_prefix = ""
+    elif "zynq" in platform:
+        # PYNQ Zynq boards need to execute with sudo
+        remote_prefix = "echo %s | sudo -S " % pynq_password
+
     inp = execution_context[model.graph.input[0].name]
     # make copy of array before saving it
     inp = inp.copy()
+    batchsize = inp.shape[0]
     np.save(os.path.join(deployment_dir, "input.npy"), inp)
     # extracting last folder of absolute path (deployment_dir)
     deployment_folder = os.path.basename(os.path.normpath(deployment_dir))
     # copy input to PYNQ board
-    cmd = "sshpass -p {} scp -P{} -r {}/input.npy {}@{}:{}/{}".format(
-        pynq_password,
+    cmd = local_prefix + "scp -P{} -r {}/input.npy {}@{}:{}/{}".format(
         pynq_port,
         deployment_dir,
         pynq_username,
@@ -60,27 +80,31 @@ def remote_exec(model, execution_context):
         deployment_folder,
     )
     bash_command = ["/bin/bash", "-c", cmd]
-    process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_compile.communicate()
+    process_scp_in = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
+    process_scp_in.communicate()
+
+    # use platform attribute for correct remote execution
+    if platform == "alveo":
+        remote_cmd = "bash -ic 'bash alveo_run.sh execute %d' \"" % batchsize
+    else:
+        remote_cmd = (
+            "python3.6 driver.py --exec_mode=execute --batchsize={} "
+            "--bitfile={} --inputfile=input.npy --outputfile=output.npy "
+            '--platform={} "'
+        ).format(batchsize, bitfile, platform)
     cmd = (
-        "sshpass -p {} ssh {}@{} -p {} "
-        '"cd {}/{}; echo "{}" | '
-        'sudo -S python3.6 driver.py --exec_mode="execute" --batchsize=1" '
-        '--bitfile="resizer.bit" --inputfile="input.npy" --outputfile="output.npy"'
-    ).format(
-        pynq_password,
-        pynq_username,
-        pynq_ip,
-        pynq_port,
-        pynq_target_dir,
-        deployment_folder,
-        pynq_password,
-    )
+        local_prefix + 'ssh {}@{} -p {} "cd {}/{}; ' + remote_prefix + remote_cmd
+    ).format(pynq_username, pynq_ip, pynq_port, pynq_target_dir, deployment_folder)
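+    # for zynq-iodma the assembled command looks roughly like (illustrative values):
+    #   sshpass -p <pw> ssh xilinx@<ip> -p 22 "cd <target_dir>/<folder>; echo <pw> |
+    #   sudo -S python3.6 driver.py --exec_mode=execute --batchsize=<N> ..."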
     bash_command = ["/bin/bash", "-c", cmd]
-    process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_compile.communicate()
-    cmd = "sshpass -p {} scp -P{} {}@{}:{}/{}/output.npy {}".format(
-        pynq_password,
+    process_exec_accel = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
+    process_exec_accel.communicate()
+    # remove stale output file from local dir, if any
+    try:
+        os.remove("{}/output.npy".format(deployment_dir))
+    except FileNotFoundError:
+        pass
+    # copy generated output to local
+    cmd = local_prefix + "scp -P{} {}@{}:{}/{}/output.npy {}".format(
         pynq_port,
         pynq_username,
         pynq_ip,
@@ -89,7 +113,7 @@ def remote_exec(model, execution_context):
         deployment_dir,
     )
     bash_command = ["/bin/bash", "-c", cmd]
-    process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_compile.communicate()
+    process_scp_out = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
+    process_scp_out.communicate()
     outp = np.load("{}/output.npy".format(deployment_dir))
     execution_context[model.graph.output[0].name] = outp
diff --git a/src/finn/core/rtlsim_exec.py b/src/finn/core/rtlsim_exec.py
index e5e6d29bd8d8ed23f6a4958856ed1ddea3617175..d83bcd3a75dd0d2fc02315c72784e57348901a04 100644
--- a/src/finn/core/rtlsim_exec.py
+++ b/src/finn/core/rtlsim_exec.py
@@ -66,6 +66,11 @@ def rtlsim_exec(model, execution_context):
     i_stream_w = first_node.get_instream_width()
     # convert input into time multiplexed shape
     i_folded_shape = first_node.get_folded_input_shape()
+    batchsize = i_tensor.shape[0]
+    # override batch size for input
+    i_folded_shape = list(i_folded_shape)
+    i_folded_shape[0] = batchsize
+    i_folded_shape = tuple(i_folded_shape)
     # TODO any other layout transformations need to happen here!
     i_tensor = i_tensor.reshape(i_folded_shape)
     # extract output shape
@@ -74,24 +79,30 @@ def rtlsim_exec(model, execution_context):
     o_dt = model.get_tensor_datatype(o_name)
     last_node = getCustomOp(model.find_producer(o_name))
     o_folded_shape = last_node.get_folded_output_shape()
+    # override batch size from actual input
+    o_shape = list(o_shape)
+    o_shape[0] = batchsize
+    o_shape = tuple(o_shape)
+    o_folded_shape = list(o_folded_shape)
+    o_folded_shape[0] = batchsize
+    o_folded_shape = tuple(o_folded_shape)
     o_stream_w = last_node.get_outstream_width()
     packedBits = o_stream_w
     targetBits = o_dt.bitwidth()
     # pack input
     packed_input = npy_to_rtlsim_input(i_tensor, i_dt, i_stream_w)
     num_out_values = last_node.get_number_output_values()
+    num_out_values *= batchsize
     # prepare pyverilator model
     rtlsim_so = model.get_metadata_prop("rtlsim_so")
     if (rtlsim_so is None) or (not os.path.isfile(rtlsim_so)):
         sim = pyverilate_stitched_ip(model)
         model.set_metadata_prop("rtlsim_so", sim.lib._name)
     else:
-        sim = PyVerilator(rtlsim_so)
-    _reset_rtlsim(sim)
-    _toggle_clk(sim)
+        sim = PyVerilator(rtlsim_so, auto_eval=False)
     ret = _run_rtlsim(sim, packed_input, num_out_values, trace_file)
     packed_output = ret[0]
-    model.set_metadata_prop("sim_cycles", str(ret[1]))
+    model.set_metadata_prop("cycles_rtlsim", str(ret[1]))
     # unpack output and put into context
     o_folded_tensor = rtlsim_output_to_npy(
         packed_output, None, o_dt, o_folded_shape, packedBits, targetBits
@@ -103,19 +114,23 @@ def rtlsim_exec(model, execution_context):
 def _reset_rtlsim(sim):
     """Sets reset input in pyverilator to zero, toggles the clock and set it
     back to one"""
-    sim.io.ap_rst_n_0 = 0
-    sim.io.ap_clk_0 = 1
-    sim.io.ap_clk_0 = 0
-    sim.io.ap_rst_n_0 = 1
+    sim.io.ap_rst_n = 0
+    _toggle_clk(sim)
+    _toggle_clk(sim)
+    sim.io.ap_rst_n = 1
+    _toggle_clk(sim)
+    _toggle_clk(sim)
 
 
 def _toggle_clk(sim):
     """Toggles the clock input in pyverilator once."""
-    sim.io.ap_clk_0 = 1
-    sim.io.ap_clk_0 = 0
+    sim.io.ap_clk = 0
+    sim.eval()
+    sim.io.ap_clk = 1
+    sim.eval()
 
 
-def _run_rtlsim(sim, inp, num_out_values, trace_file=None):
+def _run_rtlsim(sim, inp, num_out_values, trace_file=None, reset=True):
     """Runs the pyverilator simulation by passing the input values to the simulation,
     toggle the clock and observing the execution time. Argument num_out_values contains
     the number of expected output values, so the simulation is closed after all
@@ -125,7 +140,7 @@ def _run_rtlsim(sim, inp, num_out_values, trace_file=None):
     from finn.util.fpgadataflow)"""
     inputs = inp
     outputs = []
-    sim.io.out_r_0_tready = 1
+    sim.io.m_axis_0_tready = 1
 
     # observe if output is completely calculated
     # observation_count will contain the number of cycles the calculation ran
@@ -140,22 +155,23 @@ def _run_rtlsim(sim, inp, num_out_values, trace_file=None):
 
     if trace_file is not None:
         sim.start_vcd_trace(trace_file)
+    if reset:
+        _reset_rtlsim(sim)
 
     while not (output_observed):
-        sim.io.in0_V_V_0_tvalid = 1 if len(inputs) > 0 else 0
-        sim.io.in0_V_V_0_tdata = inputs[0] if len(inputs) > 0 else 0
-        if sim.io.in0_V_V_0_tready == 1 and sim.io.in0_V_V_0_tvalid == 1:
+        sim.io.s_axis_0_tvalid = 1 if len(inputs) > 0 else 0
+        sim.io.s_axis_0_tdata = inputs[0] if len(inputs) > 0 else 0
+        if sim.io.s_axis_0_tready == 1 and sim.io.s_axis_0_tvalid == 1:
             inputs = inputs[1:]
-        if sim.io.out_r_0_tvalid == 1 and sim.io.out_r_0_tready == 1:
-            outputs = outputs + [sim.io.out_r_0_tdata]
-        sim.io.ap_clk_0 = 1
-        sim.io.ap_clk_0 = 0
+        if sim.io.m_axis_0_tvalid == 1 and sim.io.m_axis_0_tready == 1:
+            outputs = outputs + [sim.io.m_axis_0_tdata]
+        _toggle_clk(sim)
 
         observation_count = observation_count + 1
         no_change_count = no_change_count + 1
 
         if len(outputs) == num_out_values:
-            sim_cycles = observation_count
+            cycles_rtlsim = observation_count
             output_observed = True
 
         if no_change_count == liveness_threshold:
@@ -175,4 +191,4 @@ def _run_rtlsim(sim, inp, num_out_values, trace_file=None):
         sim.flush_vcd_trace()
         sim.stop_vcd_trace()
 
-    return (outputs, sim_cycles)
+    return (outputs, cycles_rtlsim)
diff --git a/src/finn/core/throughput_test.py b/src/finn/core/throughput_test.py
index c82d540e29fc59b92a22bf011e823a9f8c076843..1306edfa23a9b25de41d0592796b4a03ad4e6508 100644
--- a/src/finn/core/throughput_test.py
+++ b/src/finn/core/throughput_test.py
@@ -28,12 +28,17 @@
 
 import os
 import subprocess
+import numpy as np
+import warnings
+from finn.util.basic import gen_finn_dt_tensor
+from finn.core.rtlsim_exec import rtlsim_exec
 
 
-def throughput_test(model):
+def throughput_test_remote(model, batchsize=1000):
     """Runs the throughput test for the given model remotely on the pynq board.
     The metadata properties related to the pynq board have to be set.
-    Returns a dictionary with results of the throughput test"""
+    Returns a dictionary with results of the throughput test. Returns None
+    if the test fails."""
 
     pynq_ip = model.get_metadata_prop("pynq_ip")
     pynq_port = int(model.get_metadata_prop("pynq_port"))
@@ -43,26 +48,49 @@ def throughput_test(model):
     deployment_dir = model.get_metadata_prop("pynq_deploy_dir")
     # extracting last folder of absolute path (deployment_dir)
     deployment_folder = os.path.basename(os.path.normpath(deployment_dir))
+    platform = model.get_metadata_prop("platform")
+    assert platform in ["alveo", "zynq-iodma"]
+    bitfile = model.get_metadata_prop("bitfile")
+    bitfile = os.path.basename(bitfile)
+    if pynq_password == "":
+        if "zynq" in platform:
+            raise Exception("PYNQ board remote exec needs password for sudo")
+        else:
+            local_prefix = ""  # assume we are using an ssh key
+            warnings.warn("Empty password, make sure you've set up an ssh key")
+    else:
+        local_prefix = "sshpass -p %s " % pynq_password
 
+    if platform == "alveo":
+        # Alveo can run without sudo but needs correct environment
+        remote_prefix = "conda activate finn-pynq-alveo; "
+    elif "zynq" in platform:
+        # PYNQ Zynq boards need to execute with sudo
+        remote_prefix = "echo %s | sudo -S " % pynq_password
+
+    # use platform attribute for correct remote execution
+    if platform == "alveo":
+        remote_cmd = "bash -ic 'bash alveo_run.sh throughput_test %d' \"" % batchsize
+    else:
+        remote_cmd = (
+            "python3.6 driver.py --exec_mode=throughput_test --batchsize={} "
+            "--bitfile={} --inputfile=input.npy --outputfile=output.npy "
+            '--platform={} "'
+        ).format(batchsize, bitfile, platform)
     cmd = (
-        "sshpass -p {} ssh {}@{} -p {} "
-        '"cd {}/{}; echo "{}" | '
-        'sudo -S python3.6 driver.py --exec_mode="throughput_test" --batchsize=1000"'
-    ).format(
-        pynq_password,
-        pynq_username,
-        pynq_ip,
-        pynq_port,
-        pynq_target_dir,
-        deployment_folder,
-        pynq_password,
-    )
+        local_prefix + 'ssh {}@{} -p {} "cd {}/{}; ' + remote_prefix + remote_cmd
+    ).format(pynq_username, pynq_ip, pynq_port, pynq_target_dir, deployment_folder)
     bash_command = ["/bin/bash", "-c", cmd]
-    process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_compile.communicate()
+    process_throughput_test = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
+    process_throughput_test.communicate()
 
-    cmd = "sshpass -p {} scp -P{} {}@{}:{}/{}/nw_metrics.txt {}".format(
-        pynq_password,
+    # remove any pre-existing metrics file
+    try:
+        os.remove("{}/nw_metrics.txt".format(deployment_dir))
+    except FileNotFoundError:
+        pass
+
+    cmd = local_prefix + "scp -P{} {}@{}:{}/{}/nw_metrics.txt {}".format(
         pynq_port,
         pynq_username,
         pynq_ip,
@@ -74,7 +102,56 @@ def throughput_test(model):
     process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
     process_compile.communicate()
 
-    with open("{}/nw_metrics.txt".format(deployment_dir), "r") as file:
-        res = eval(file.read())
+    try:
+        with open("{}/nw_metrics.txt".format(deployment_dir), "r") as file:
+            res = eval(file.read())
+        return res
+    except FileNotFoundError:
+        return None
+
+
+def throughput_test_rtlsim(model, batchsize=100):
+    """Runs a throughput test for the given IP-stitched model. When combined
+    with tracing, useful to determine bottlenecks and required FIFO sizes."""
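+    # Usage sketch (assumes the model has been prepared for stitched-IP rtlsim):
+    #   model.set_metadata_prop("exec_mode", "rtlsim")
+    #   perf = throughput_test_rtlsim(model, batchsize=100)
+    #   print(perf["throughput[images/s]"], perf["cycles"])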
+
+    assert (
+        model.get_metadata_prop("exec_mode") == "rtlsim"
+    ), """Top-level exec_mode
+    metadata_prop must be set to rtlsim"""
+
+    # create random input
+    iname = model.graph.input[0].name
+    ishape = model.get_tensor_shape(iname)
+    ishape_batch = ishape
+    ishape_batch[0] = batchsize
+    idt = model.get_tensor_datatype(iname)
+    dummy_input = gen_finn_dt_tensor(idt, ishape_batch)
+    # compute input/output sizes
+    oname = model.graph.output[0].name
+    oshape = model.get_tensor_shape(oname)
+    oshape_batch = oshape
+    oshape_batch[0] = batchsize
+    odt = model.get_tensor_datatype(oname)
+    i_bytes = (np.prod(ishape_batch) * idt.bitwidth()) / 8
+    o_bytes = (np.prod(oshape_batch) * odt.bitwidth()) / 8
+    # make empty exec context and insert input
+    ctx = model.make_empty_exec_context()
+    ctx[iname] = dummy_input
+    # remove liveness threshold, launch rtlsim
+    os.environ["LIVENESS_THRESHOLD"] = "-1"
+    rtlsim_exec(model, ctx)
+    # extract metrics
+    cycles = int(model.get_metadata_prop("cycles_rtlsim"))
+    clk_ns = float(model.get_metadata_prop("clk_ns"))
+    fclk_mhz = 1 / (clk_ns * 0.001)
+    runtime_s = (cycles * clk_ns) * (10 ** -9)
+    res = dict()
+    res["cycles"] = cycles
+    res["runtime[ms]"] = runtime_s * 1000
+    res["throughput[images/s]"] = batchsize / runtime_s
+    res["DRAM_in_bandwidth[Mb/s]"] = i_bytes * 0.000001 / runtime_s
+    res["DRAM_out_bandwidth[Mb/s]"] = o_bytes * 0.000001 / runtime_s
+    res["fclk[mhz]"] = fclk_mhz
+    res["N"] = batchsize
 
     return res
diff --git a/src/finn/custom_op/__init__.py b/src/finn/custom_op/__init__.py
index ab6e03bee65b8bf5c4041dd8021b1a561e7673d2..4ae7b9ebffaab6ca6be04b8d73f647b2db22dc78 100644
--- a/src/finn/custom_op/__init__.py
+++ b/src/finn/custom_op/__init__.py
@@ -56,8 +56,15 @@ class CustomOp(ABC):
                     ret = ret.decode("utf-8")
                 return ret
             else:
-                # not set, return default value
-                return def_val
+                if req:
+                    raise Exception(
+                        """Required attribute %s unspecified in
+                    a %s node"""
+                        % (name, self.onnx_node.op_type)
+                    )
+                else:
+                    # not set, return default value
+                    return def_val
         except KeyError:
             raise AttributeError("Op has no such attribute: " + name)
 
diff --git a/src/finn/custom_op/debugmarker.py b/src/finn/custom_op/debugmarker.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c02f0dc81295dfc5c3060d549b6853eac1d0bac
--- /dev/null
+++ b/src/finn/custom_op/debugmarker.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from finn.custom_op import CustomOp
+from onnx import helper
+
+
+class DebugMarker(CustomOp):
+    def get_nodeattr_types(self):
+        return {"export_debug_name": ("s", True, "")}
+
+    def make_shape_compatible_op(self, model):
+        node = self.onnx_node
+        return helper.make_node("Identity", [node.input[0]], [node.output[0]])
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # data type stays the same
+        dtype = model.get_tensor_datatype(node.input[0])
+        model.set_tensor_datatype(node.output[0], dtype)
+        # create quantization annotation for debug marker
+        model.set_tensor_datatype(self.get_nodeattr("export_debug_name"), dtype)
+
+    def execute_node(self, context, graph):
+        node = self.onnx_node
+        inp_name = node.input[0]
+        out_name = node.output[0]
+        inp = context[inp_name]
+        context[out_name] = inp
+        # insert debug marker output as separate tensor
+        context[self.get_nodeattr("export_debug_name")] = inp
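+        # with execute_onnx(..., return_full_exec_context=True), the tensor named
+        # by export_debug_name can then be retrieved from the returned context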
+
+    def verify_node(self):
+        info_messages = []
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+        return info_messages
diff --git a/src/finn/custom_op/fpgadataflow/__init__.py b/src/finn/custom_op/fpgadataflow/__init__.py
index d47b687b65d93ec45d936afd91c08c117cf8dbc8..7de6cce936ee54d58d9a526e926ff79dcd35b90d 100644
--- a/src/finn/custom_op/fpgadataflow/__init__.py
+++ b/src/finn/custom_op/fpgadataflow/__init__.py
@@ -40,6 +40,7 @@ from finn.util.basic import (
 from finn.util.fpgadataflow import (
     IPGenBuilder,
     pyverilate_get_liveness_threshold_cycles,
+    rtlsim_multi_io,
 )
 from . import templates
 
@@ -81,12 +82,15 @@ class HLSCustomOp(CustomOp):
             "ip_path": ("s", False, ""),
             "ip_vlnv": ("s", False, ""),
             "exec_mode": ("s", False, ""),
-            "sim_cycles": ("i", False, 0),
+            "cycles_rtlsim": ("i", False, 0),
+            "cycles_estimate": ("i", False, 0),
             "rtlsim_trace": ("s", False, ""),
             "res_estimate": ("s", False, ""),
             "res_hls": ("s", False, ""),
             "res_synth": ("s", False, ""),
             "rtlsim_so": ("s", False, ""),
+            # partitioning info
+            "partition_id": ("i", False, 0),
             # input and output FIFO depths
             "inFIFODepth": ("i", False, 2),
             "outFIFODepth": ("i", False, 2),
@@ -99,6 +103,23 @@ class HLSCustomOp(CustomOp):
         prefixed_top_name = "%s_%s" % (node.name, node.name)
         return prefixed_top_name
 
+    def get_verilog_top_module_intf_names(self):
+        """Return a dict of names of input and output interfaces.
+        The keys reflect the protocols each interface implements:
+        'clk', 'rst', 'm_axis', 's_axis', 'aximm', 'axilite'.
+        Values are lists of names:
+        's_axis' names correspond to the list of node inputs in order,
+        'm_axis' names correspond to the list of node outputs in order.
+        Each block must have at most one aximm and one axilite."""
+        intf_names = {}
+        intf_names["clk"] = ["ap_clk"]
+        intf_names["rst"] = ["ap_rst_n"]
+        intf_names["s_axis"] = ["in0_V_V"]
+        intf_names["m_axis"] = ["out_V_V"]
+        intf_names["aximm"] = []
+        intf_names["axilite"] = []
+        return intf_names
+
     def get_verilog_top_filename(self):
         "Return the Verilog top module filename for this node."
 
@@ -109,6 +130,31 @@ class HLSCustomOp(CustomOp):
         )
         return verilog_file
 
+    def get_all_verilog_paths(self):
+        "Return list of all folders containing Verilog code for this node."
+
+        code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        assert (
+            code_gen_dir != ""
+        ), """Node attribute "code_gen_dir_ipgen" is
+        not set. Please run HLSSynthIP first."""
+        verilog_path = "{}/project_{}/sol1/impl/verilog/".format(
+            code_gen_dir, self.onnx_node.name
+        )
+        # default impl only returns the HLS verilog codegen dir
+        return [verilog_path]
+
+    def get_all_verilog_filenames(self):
+        "Return list of all Verilog files used for this node."
+
+        verilog_files = []
+        verilog_paths = self.get_all_verilog_paths()
+        for verilog_path in verilog_paths:
+            for f in os.listdir(verilog_path):
+                if f.endswith(".v"):
+                    verilog_files += [f]
+        return verilog_files
+
     def prepare_rtlsim(self):
         """Creates a Verilator emulation library for the RTL code generated
         for this node, sets the rtlsim_so attribute to its path and returns
@@ -116,24 +162,15 @@ class HLSCustomOp(CustomOp):
 
         if PyVerilator is None:
             raise ImportError("Installation of PyVerilator is required.")
-        # ensure that code is generated
-        code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
-        assert (
-            code_gen_dir != ""
-        ), """Node attribute "code_gen_dir_ipgen" is
-        not set. Please run HLSSynthIP first."""
-        verilog_file = self.get_verilog_top_filename()
-        assert os.path.isfile(verilog_file), "Cannot find top-level Verilog file."
+        verilog_paths = self.get_all_verilog_paths()
+        verilog_files = self.get_all_verilog_filenames()
         # build the Verilator emu library
         sim = PyVerilator.build(
-            verilog_file,
+            verilog_files,
             build_dir=make_build_dir("pyverilator_" + self.onnx_node.name + "_"),
-            verilog_path=[
-                "{}/project_{}/sol1/impl/verilog/".format(
-                    code_gen_dir, self.onnx_node.name
-                )
-            ],
+            verilog_path=verilog_paths,
             trace_depth=get_rtlsim_trace_depth(),
+            top_module_name=self.get_verilog_top_module_name(),
         )
         # save generated lib filename in attribute
         self.set_nodeattr("rtlsim_so", sim.lib._name)
@@ -154,9 +191,15 @@ class HLSCustomOp(CustomOp):
         of the node as a dictionary."""
         ret = dict()
         ret["BRAM_18K"] = self.bram_estimation()
+        ret["BRAM_efficiency"] = self.bram_efficiency_estimation()
         ret["LUT"] = self.lut_estimation()
         return ret
 
+    def bram_efficiency_estimation(self):
+        """Function for BRAM efficiency estimation: actual parameter storage
+        needed divided by the allocated BRAM storage (from estimation)"""
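+        # the default of 1 (100% efficiency) suits nodes that allocate no BRAM;
+        # parameter-storing nodes are expected to override this estimate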
+        return 1
+
     def bram_estimation(self):
         """Function for BRAM resource estimation, is member function of
         HLSCustomOp class but has to be filled by every node"""
@@ -167,6 +210,12 @@ class HLSCustomOp(CustomOp):
         HLSCustomOp class but has to be filled by every node"""
         return 0
 
+    def get_exp_cycles(self):
+        """Function for estimation of expected cycles for set folding,
+        is member function of HLSCustomOp class but has to be filled
+        by every node"""
+        return 0
+
     def code_generation_ipgen(self, model, fpgapart, clk):
         """Generates c++ code and tcl script for ip generation."""
         node = self.onnx_node
@@ -259,6 +308,12 @@ class HLSCustomOp(CustomOp):
         f.close()
         self.code_gen_dict.clear()
 
+    def code_generation_ipi(self):
+        """Constructs and returns the TCL for node instantiation in Vivado IPI."""
+        vlnv = self.get_nodeattr("ip_vlnv")
+        cmd = ["create_bd_cell -type ip -vlnv %s %s" % (vlnv, self.onnx_node.name)]
+        return cmd
+
     def compile_singlenode_code(self):
         """Builds the bash script for compilation using the CppBuilder from
         finn.util.basic and executes the script to produce the executable."""
@@ -302,14 +357,24 @@ Found no codegen dir for this node, did you run the prepare_cppsim transformatio
             )
 
     def npy_to_dynamic_output(self, context):
-        """Reads the output from a .npy file and saves it at the right place in
-        the context dictionary."""
-        # TODO support multi-output nodes as needed
+        """Reads the output from an output.npy file generated from cppsim and
+        places its content into the context dictionary."""
         node = self.onnx_node
         code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
         output = np.load("{}/output.npy".format(code_gen_dir))
         context[node.output[0]] = output
 
+    def npy_to_dynamic_outputs(self, context, npy_list):
+        """Reads the output from .npy files generated from cppsim and places
+        their content into the context dictionary.
+        npy_list is a list specifying which files to read, and its order must
+        match the order of node outputs."""
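+        # e.g. (hypothetical file names) npy_to_dynamic_outputs(context,
+        # ["output0.npy", "output1.npy"]) pairs output0.npy with node.output[0]
+        # and output1.npy with node.output[1]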
+        node = self.onnx_node
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        for i in range(len(npy_list)):
+            output = np.load("{}/{}".format(code_gen_dir, npy_list[i]))
+            context[node.output[i]] = output
+
     def exec_precompiled_singlenode_model(self):
         """Executes precompiled executable."""
         executable_path = self.get_nodeattr("executable_path")
@@ -336,7 +401,7 @@ compilation transformations?
         sim.io.ap_clk = 1
         sim.io.ap_clk = 0
 
-    def rtlsim(self, sim, inp):
+    def rtlsim(self, sim, inp, inp2=None):
         """Runs the pyverilator simulation by passing the input values to the simulation,
         toggle the clock and observing the execution time. Function contains also an
         observation loop that can abort the simulation if no output value is produced
@@ -368,6 +433,13 @@ compilation transformations?
             sim.io.in0_V_V_TDATA = inputs[0] if len(inputs) > 0 else 0
             if sim.io.in0_V_V_TREADY == 1 and sim.io.in0_V_V_TVALID == 1:
                 inputs = inputs[1:]
+
+            if inp2 is not None:
+                sim.io.in1_V_V_TVALID = 1 if len(inp2) > 0 else 0
+                sim.io.in1_V_V_TDATA = inp2[0] if len(inp2) > 0 else 0
+                if sim.io.in1_V_V_TREADY == 1 and sim.io.in1_V_V_TVALID == 1:
+                    inp2 = inp2[1:]
+
             if sim.io.out_V_V_TVALID == 1 and sim.io.out_V_V_TREADY == 1:
                 outputs = outputs + [sim.io.out_V_V_TDATA]
             sim.io.ap_clk = 1
@@ -377,7 +449,7 @@ compilation transformations?
             no_change_count = no_change_count + 1
 
             if len(outputs) == num_out_values:
-                self.set_nodeattr("sim_cycles", observation_count)
+                self.set_nodeattr("cycles_rtlsim", observation_count)
                 output_observed = True
 
             if no_change_count == liveness_threshold:
@@ -398,6 +470,16 @@ compilation transformations?
             sim.stop_vcd_trace()
         return outputs
 
+    def rtlsim_multi_io(self, sim, io_dict):
+        "Run rtlsim for this node, supports multiple i/o streams."
+
+        trace_file = self.get_nodeattr("rtlsim_trace")
+        if trace_file == "default":
+            trace_file = self.onnx_node.name + ".vcd"
+        num_out_values = self.get_number_output_values()
+        total_cycle_count = rtlsim_multi_io(sim, io_dict, num_out_values, trace_file)
+        self.set_nodeattr("cycles_rtlsim", total_cycle_count)
+
     def execute_node(self, context, graph):
         """Executes single node using cppsim or rtlsim."""
         mode = self.get_nodeattr("exec_mode")
diff --git a/src/finn/custom_op/fpgadataflow/addstreams_batch.py b/src/finn/custom_op/fpgadataflow/addstreams_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..14fb65739dab4208edd0c61bb7ca8ae2d114baab
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/addstreams_batch.py
@@ -0,0 +1,367 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+
+import numpy as np
+
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from onnx import TensorProto, helper
+from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy
+
+
+class AddStreams_Batch(HLSCustomOp):
+    """Class that corresponds to finn-hlslib AddStreams_Batch function."""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            "NumChannels": ("i", True, ""),
+            "PE": ("i", True, ""),
+            # FINN DataTypes for inputs; output datatype inferred from input
+            "inputDataType": ("s", True, ""),
+            # number of input vectors, examples:
+            # [1] is a single vector (like a FC layer with batch=1)
+            # [4] is four vectors (like a FC layer with batch=4)
+            # [1, 4, 4] is four * four vectors (like a conv layer with batch=1)
+            "numInputVectors": ("ints", False, [1]),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def get_normal_input_shape(self):
+        ich = self.get_nodeattr("NumChannels")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        ishape = tuple(vecs + [ich])
+        return ishape
+
+    def get_folded_input_shape(self):
+        ich = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        assert ich % pe == 0, "PE must divide NumChannels"
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        ishape = tuple(vecs + [ich // pe, pe])
+        return ishape
+
+    def get_normal_output_shape(self):
+        return self.get_normal_input_shape()
+
+    def get_folded_output_shape(self):
+        return self.get_folded_input_shape()
+
+    def make_shape_compatible_op(self, model):
+        exp_ishape = self.get_normal_input_shape()
+        oshape = self.get_normal_output_shape()
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[0]))
+        assert ishape == exp_ishape, "Unexpected input1 shape."
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[1]))
+        assert ishape == exp_ishape, "Unexpected input2 shape."
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten().astype(float),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        # check input datatype against property
+        exp_idt_name = self.get_input_datatype().name
+        idt_name = self.get_nodeattr("inputDataType")
+        assert exp_idt_name == idt_name, "Bad input DataType for AddStreams layer"
+        # enforce output data type
+        odt = self.get_output_datatype()
+        model.set_tensor_datatype(self.onnx_node.output[0], odt)
+
+    def verify_node(self):
+        info_messages = []
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+
+        # verify that "backend" is set to "fpgadataflow"
+        backend_value = self.get_nodeattr("backend")
+        if backend_value == "fpgadataflow":
+            info_messages.append("Attribute backend is set correctly")
+        else:
+            info_messages.append('Attribute backend should be set to "fpgadataflow"')
+
+        # verify that all necessary attributes exist
+        try:
+            self.get_nodeattr("code_gen_dir_cppsim")
+            self.get_nodeattr("executable_path")
+            self.get_nodeattr("NumChannels")
+            self.get_nodeattr("PE")
+            self.get_nodeattr("inputDataType")
+            info_messages.append("All necessary attributes exist")
+        except Exception:
+            info_messages.append(
+                """The required LabelSelect_Batch attributes do not exist."""
+            )
+
+        return info_messages
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        return DataType[self.get_nodeattr("inputDataType")]
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output."""
+        # we need to set output datatype to the next larger int or uint
+        # enhancement: consider specifying w/ explicit outputDataType attribute
+        # to allow overflow and use the same idt if user wants
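+        # e.g. adding two INT4 streams (range [-8, 7]) needs INT5 for the
+        # worst-case sum of -16; two UINT4 streams (max 15) need UINT5 for 30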
+        idt = DataType[self.get_nodeattr("inputDataType")]
+        if idt.signed():
+            return DataType.get_smallest_possible(2 * idt.min())
+        else:
+            return DataType.get_smallest_possible(2 * idt.max())
+
+    def get_instream_width(self):
+        """Returns input stream width."""
+        ibits = self.get_input_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        in_width = pe * ibits
+        return in_width
+
+    def get_outstream_width(self):
+        """Returns output stream width."""
+        obits = self.get_output_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        out_width = pe * obits
+        return out_width
+
+    def get_number_output_values(self):
+        return np.prod(self.get_folded_output_shape()[:-1])
+
+    def get_exp_cycles(self):
+        # Channels/PE * batch size * fmdim * fmdim
+        return np.prod(self.get_folded_output_shape()[:-1])
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+        exp_ishape = self.get_normal_input_shape()
+        exp_oshape = self.get_normal_output_shape()
+        folded_ishape = self.get_folded_input_shape()
+        folded_oshape = self.get_folded_output_shape()
+
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following value ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        inp = context[node.input[0]]
+        assert str(inp.dtype) == "float32", "Input datatype is not float32"
+        assert (
+            inp.shape == exp_ishape
+        ), """Input0 shape doesn't match expected shape ."""
+        export_idt = self.get_input_datatype()
+        # reshape input into folded form
+        inp = inp.reshape(folded_ishape)
+        # make copy before saving array
+        reshaped_input = inp.copy()
+        np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_input)
+
+        # exact same thing for input1
+        inp = context[node.input[1]]
+        assert str(inp.dtype) == "float32", "Input datatype is not float32"
+        assert (
+            inp.shape == exp_ishape
+        ), """Input1 shape doesn't match expected shape ."""
+        export_idt = self.get_input_datatype()
+        # reshape input into folded form
+        inp = inp.reshape(folded_ishape)
+        # make copy before saving array
+        reshaped_input = inp.copy()
+        np.save(os.path.join(code_gen_dir, "input_1.npy"), reshaped_input)
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            assert (
+                context[node.output[0]].shape == folded_oshape
+            ), "cppsim did not produce expected folded output shape"
+            context[node.output[0]] = context[node.output[0]].reshape(*exp_oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            rtlsim_inp0 = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            rtlsim_inp1 = npy_to_rtlsim_input(
+                "{}/input_1.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            rtlsim_output = self.rtlsim(sim, rtlsim_inp0, rtlsim_inp1)
+            odt = self.get_output_datatype()
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                rtlsim_output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+            # load and reshape output
+            output = np.load(out_npy_path)
+            output = np.asarray([output], dtype=np.float32).reshape(*exp_oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following value ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        assert (
+            context[node.output[0]].shape == exp_oshape
+        ), """Output shape doesn't match expected shape."""
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "streamtools.h"']
+
+    def defines(self, var):
+        self.code_gen_dict["$DEFINES$"] = []
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        self.code_gen_dict["$READNPYDATA$"] = []
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+        npy_in = "%s/input_1.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in1);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in1 ("in1");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        node = self.onnx_node
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """{}<{}, {}, {}, {}, {}> (in0, in1, out, 1);""".format(
+                node.op_type,
+                self.get_nodeattr("PE"),
+                self.get_input_datatype().get_hls_datatype_str(),
+                self.get_input_datatype().get_hls_datatype_str(),
+                self.get_output_datatype().get_hls_datatype_str(),
+                self.get_number_output_values(),
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        oshape = self.get_folded_output_shape()
+        oshape_cpp_str = str(oshape).replace("(", "{").replace(")", "}")
+
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s");'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                oshape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            """void {}(hls::stream<ap_uint<{}>> &in0, hls::stream<ap_uint<{}>> &in1,
+                hls::stream<ap_uint<{}>> &out)""".format(
+                self.onnx_node.name,
+                self.get_nodeattr("PE") * self.get_input_datatype().bitwidth(),
+                self.get_nodeattr("PE") * self.get_input_datatype().bitwidth(),
+                self.get_nodeattr("PE") * self.get_output_datatype().bitwidth(),
+            )
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=in1")
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
+
+    def get_verilog_top_module_intf_names(self):
+        intf_names = super().get_verilog_top_module_intf_names()
+        intf_names["s_axis"] = ["in0_V_V", "in1_V_V"]
+        return intf_names
diff --git a/src/finn/custom_op/fpgadataflow/channelwise_op_batch.py b/src/finn/custom_op/fpgadataflow/channelwise_op_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..88b55aaec8fa834abe274b703a404b4419571401
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/channelwise_op_batch.py
@@ -0,0 +1,639 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from math import ceil
+import os
+
+import numpy as np
+
+from onnx import TensorProto, helper
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from finn.util.data_packing import (
+    npy_to_rtlsim_input,
+    numpy_to_hls_code,
+    rtlsim_output_to_npy,
+)
+from . import templates
+
+import warnings
+
+# ONNX i/o tensor shape assumptions for channelwise ops:
+# input 0 is the input tensor, shape (..., NumChannels)
+# input 1 is the channelwise parameter tensor, shape (NumChannels, params_per_channel)
+# output 0 is the output tensor, shape (..., NumChannels) - same as input
+# the ... here can be any shape (representing groups of vectors)
+
+
+def get_smallest_possible(vals):
+    """Returns smallest (fewest bits) possible DataType that can represent
+    value. Prefers unsigned integers where possible."""
+    vals = np.array(vals)
+    for v in vals:
+        assert int(v) == v, "Error: get_smallest_possible only supports integer values"
+
+    for k in DataType.__members__:
+        dt = DataType[k]
+
+        if dt in [DataType.BIPOLAR, DataType.TERNARY, DataType.FLOAT32]:
+            # not currently supported
+            continue
+
+        if (dt.min() <= vals).all() and (vals <= dt.max()).all():
+            return dt
+
+    warnings.warn(
+        """InferChannelwiseLinearLayer: Output values may not be
+    representable with supported data types.
+    Setting the maximum width data type available.
+    This will lead to errors if there are no constraints on the input.
+    """
+    )
+
+    if (0 <= vals).all():
+        return DataType.UINT64
+    else:
+        return DataType.INT64
+
+
+class ChannelwiseOp_Batch(HLSCustomOp):
+    """Class that corresponds to finn-hls Thresholding_Batch function.
+    It can implement a variety of channel-wise parametrized operations,
+    including Add, Mul and multi-thresholding.
+    """
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+        self.decoupled_wrapper = templates.decoupled_wrapper
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            # channelwise "map" function to apply:
+            # one of cmp_le, cmp_ge, add, mul
+            "Func": ("s", False, "cmp_le"),
+            "PE": ("i", True, 0),
+            "NumChannels": ("i", True, 0),
+            # string defining memory resource type for parameters
+            "ram_style": ("s", False, "distributed"),
+            # FINN DataTypes for inputs, weights, outputs
+            "inputDataType": ("s", True, ""),
+            "paramDataType": ("s", True, ""),
+            "outputDataType": ("s", True, ""),
+            # input and output FIFO depths
+            "inFIFODepth": ("i", False, 0),
+            "outFIFODepth": ("i", False, 0),
+            # number of input vectors, examples:
+            # [1] is a single vector (like a FC layer with batch=1)
+            # [4] is four vectors (like a FC layer with batch=4)
+            # [1, 4, 4] is four * four vectors (like a conv layer with batch=1)
+            "numInputVectors": ("ints", False, [1]),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def calc_tmem(self):
+        """Calculates and returns TMEM, the depth of the memory used
+        to store the channelwise op parameters."""
+        chn = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        return chn // pe
+
+    def make_shape_compatible_op(self, model):
+        oshape = self.get_normal_output_shape()
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten().astype(float),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # check input datatype against property
+        idt = model.get_tensor_datatype(node.input[0])
+
+        exp_idt_name = self.get_nodeattr("inputDataType")
+        if exp_idt_name != idt.name:
+            func = self.get_nodeattr("Func")
+            assert func in ["add", "mul"], "Bad input DataType for ChannelwiseOp layer"
+
+            self.set_nodeattr("inputDataType", idt.name)
+            # determine the output datatype from the parameter range ('add'/'mul' cases)
+
+            # get parameter ranges
+            param = model.get_initializer(node.input[1])
+            param_min = min(param.flatten())
+            param_max = max(param.flatten())
+
+            # set function and determine output data type
+            if func == "add":
+                out_min = idt.min() + param_min
+                out_max = idt.max() + param_max
+                odt = get_smallest_possible([out_min, out_max])
+            elif func == "mul":
+                possible_limits = []
+                possible_limits += [idt.min() * param_min]
+                possible_limits += [idt.min() * param_max]
+                possible_limits += [idt.max() * param_min]
+                possible_limits += [idt.max() * param_max]
+                odt = get_smallest_possible(possible_limits)
+
+            self.set_nodeattr("outputDataType", odt.name)
+
+        # set output datatype from property
+        odt = self.get_output_datatype()
+        model.set_tensor_datatype(node.output[0], odt)
+
+    def verify_node(self):
+        info_messages = []
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+
+        # verify that "backend" is set to "fpgadataflow"
+        backend_value = self.get_nodeattr("backend")
+        if backend_value == "fpgadataflow":
+            info_messages.append("Attribute backend is set correctly")
+        else:
+            info_messages.append('Attribute backend should be set to "fpgadataflow"')
+
+        # verify that all necessary attributes exist
+        # TODO collect automatically from get_nodeattr_types
+        try:
+            self.get_nodeattr("code_gen_dir_cppsim")
+            self.get_nodeattr("executable_path")
+            self.get_nodeattr("NumChannels")
+            self.get_nodeattr("PE")
+            self.get_nodeattr("inputDataType")
+            self.get_nodeattr("paramDataType")
+            self.get_nodeattr("outputDataType")
+            info_messages.append("All necessary attributes exist")
+        except Exception:
+            info_messages.append(
+                """The required Threshold_Batch attributes do not exist."""
+            )
+
+        return info_messages
+
+    def bram_estimation(self):
+        """Calculates BRAM cost if resource set to BRAM"""
+        style = self.get_nodeattr("ram_style")
+        P = self.get_nodeattr("PE")
+        idt = self.get_input_datatype()
+        A = idt.bitwidth()
+        tmem = self.calc_tmem()
+
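+        # e.g. PE=16 with 8-bit inputs and tmem=2048 in block RAM gives
+        # ceil(8*16/16) * ceil(2048/1024) = 8 * 2 = 16 BRAM18Ks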
+        if style == "block" and tmem > 1:
+            return int(ceil(A * P / 16)) * int(ceil(tmem / 1024))
+        else:
+            return 0
+
+    def lut_estimation(self):
+        """Calculates LUT cost, taking memory resource type into account """
+        # TODO add in/out FIFO contributions
+        style = self.get_nodeattr("ram_style")
+        P = self.get_nodeattr("PE")
+        idt = self.get_input_datatype()
+        A = idt.bitwidth()
+        tmem = self.calc_tmem()
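+        # e.g. PE=4 with 8-bit inputs and tmem=128 in distributed RAM:
+        # 4*8 = 32 comparator LUTs + 4*8*ceil(128/64) = 64 LUTRAM LUTs = 96 total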
+        # cost of comparators
+        comparator_cost = A * P
+        # cost of LUTRAM
+        if style == "distributed" and tmem > 1:
+            lutram_cost = P * A * int(ceil(tmem / 64))
+        else:
+            lutram_cost = 0
+        # total cost
+        return comparator_cost + lutram_cost
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        return DataType[self.get_nodeattr("inputDataType")]
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output."""
+        return DataType[self.get_nodeattr("outputDataType")]
+
+    def get_instream_width(self):
+        i_bits = self.get_input_datatype().bitwidth()
+        return i_bits * self.get_nodeattr("PE")
+
+    def get_outstream_width(self):
+        o_bits = self.get_output_datatype().bitwidth()
+        return o_bits * self.get_nodeattr("PE")
+
+    def get_folded_input_shape(self):
+        ich = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        fold = ich // pe
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        folded_input_shape = tuple(vecs + [fold, pe])
+        return folded_input_shape
+
+    def get_folded_output_shape(self):
+        # same shape as input
+        return self.get_folded_input_shape()
+
+    def get_normal_input_shape(self):
+        ich = self.get_nodeattr("NumChannels")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        normal_input_shape = tuple(vecs + [ich])
+        return normal_input_shape
+
+    def get_normal_output_shape(self):
+        # same shape as input
+        return self.get_normal_input_shape()
+
+    def get_number_output_values(self):
+        nf = np.prod(self.get_folded_output_shape()[:-1])
+        return nf
+
+    def get_exp_cycles(self):
+        # Channels/PE * batch size * fmdim * fmdim
+        return np.prod(self.get_folded_output_shape()[:-1])
+
+    def get_template_param_values(self):
+        """Returns the template parameter values according to input, output and weight
+        data types."""
+        ret = dict()
+        inp_hls_str = self.get_input_datatype().get_hls_datatype_str()
+        out_hls_str = self.get_output_datatype().get_hls_datatype_str()
+        # fill in TSrcI
+        ret["TSrcI"] = "Slice<%s>" % inp_hls_str
+        # fill in TDstI
+        ret["TDstI"] = "Slice<%s>" % out_hls_str
+
+        return ret
+
+    def get_hls_compatible_parameter_tensor(self, orig_param_vector):
+        """Convert the original numpy weight matrix orig_weight_matrix into
+        a form suitable for passing to the hlslib call:
+        * ensure chn % PE == 0
+        * interleave rows between PEs
+        * reshape into (PE, TMEM) and return
+        """
+        chn = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        tmem = chn // pe
+        assert chn % pe == 0, "Requirement NumChannels divisible by PE is violated."
+        assert (
+            orig_param_vector.ndim == 1
+        ), """Parameter vector dimension is {}.
+        Expected dimension: 1.""".format(
+            orig_param_vector.ndim
+        )
+
+        # if not self.get_input_datatype().signed():
+        #     # ensure all thresholds are nonnegative
+        #     assert (orig_param_vector >= 0).all()
+
+        # ensure all thresholds are integer
+        assert (orig_param_vector.astype(np.int32) == orig_param_vector).all()
+        ret = orig_param_vector
+
+        assert (
+            ret.shape[0] == chn
+        ), "Cardinality of parameter vector is not as expected (chn)"
+
+        # distribute rows between PEs
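+        # e.g. chn=4, pe=2: [c0, c1, c2, c3] -> PE0 gets [c0, c2], PE1 gets [c1, c3]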
+        ret = ret.reshape(tmem, pe).transpose()
+        assert (
+            ret.shape[0] == pe
+        ), """First dimension after distribution of the
+        rows between PEs is not as expected (pe)"""
+        assert (
+            ret.shape[1] == tmem
+        ), """Second dimension after distribution of the
+        rows between PEs is not as expected (tmem)"""
+
+        return ret.reshape(1, pe, tmem)
+
+    def generate_params(self, model, path):
+        code_gen_dir = path
+        # save thresholds in params.h
+        parameters = model.get_initializer(self.onnx_node.input[1])
+        parameter_tensor = self.get_hls_compatible_parameter_tensor(parameters)
+        pdt = DataType[self.get_nodeattr("paramDataType")]
+
+        parameters_hls_code = numpy_to_hls_code(
+            parameter_tensor, pdt, "parameters", False, True
+        )
+        # get input data type
+        export_idt = self.get_input_datatype()
+        if self.get_input_datatype() == DataType.BIPOLAR:
+            export_idt = DataType.BINARY
+        idt_hls = export_idt.get_hls_datatype_str()
+
+        # write parameters into params.h
+        f_params = open("{}/params.h".format(code_gen_dir), "w")
+        pdt_hls = pdt.get_hls_datatype_str()
+        # use binary to export bipolar activations
+        export_odt = self.get_output_datatype()
+        if self.get_output_datatype() == DataType.BIPOLAR:
+            export_odt = DataType.BINARY
+        odt_hls = export_odt.get_hls_datatype_str()
+        # get desired function
+        func = self.get_nodeattr("Func")
+        if func == "cmp_le":
+            func_str = "std::less_equal"
+        elif func == "cmp_ge":
+            func_str = "std::greater_equal"
+        elif func == "add":
+            func_str = "std::plus"
+        elif func == "mul":
+            func_str = "std::multiplies"
+        else:
+            raise Exception(
+                """Invalid value for attribute Func! Is currently set to: {}
+            has to be set to one of the following values
+            ("cmp_le", "cmp_ge", "add", "mul")""".format(
+                    func
+                )
+            )
+        f_params.write(
+            "static ChannelWiseOperation<{},{},{},{},{},{}> threshs \
+            = ".format(
+                self.calc_tmem(),
+                self.get_nodeattr("PE"),
+                idt_hls,
+                pdt_hls,
+                odt_hls,
+                "%s<%s>" % (func_str, odt_hls),
+            )
+        )
+        f_params.write(parameters_hls_code)
+        f_params.close()
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+
+        # TODO ensure codegen dir exists
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        # create an npy file for each input of the node (in_ind is input index)
+        in_ind = 0
+        for inputs in node.input:
+            # it is assumed that the first input of the node is the data input
+            # the second input is the channelwise parameter tensor
+            if in_ind == 0:
+                assert (
+                    str(context[inputs].dtype) == "float32"
+                ), """Input datatype is
+                not float32 as expected."""
+                expected_inp_shape = self.get_folded_input_shape()
+                reshaped_input = context[inputs].reshape(expected_inp_shape)
+                export_idt = self.get_input_datatype()
+                # make copy before saving the array
+                reshaped_input = reshaped_input.copy()
+                np.save(
+                    os.path.join(code_gen_dir, "input_{}.npy".format(in_ind)),
+                    reshaped_input,
+                )
+            elif in_ind > 2:
+                raise Exception("Unexpected input found for ChannelwiseOp_Batch")
+            in_ind += 1
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            # reinterpret binary output as bipolar where needed
+            if self.get_output_datatype() == DataType.BIPOLAR:
+                out = context[node.output[0]]
+                out = 2 * out - 1
+                context[node.output[0]] = out
+            assert (
+                context[node.output[0]].shape == self.get_folded_output_shape()
+            ), """Output shape is not as expected"""
+            # reshape output to have expected shape
+            oshape = self.get_normal_output_shape()
+            context[node.output[0]] = context[node.output[0]].reshape(*oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            inp = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            output = self.rtlsim(sim, inp)
+            odt = self.get_output_datatype()
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+
+            # load and reshape output
+            output = np.load(out_npy_path)
+            oshape = self.get_normal_output_shape()
+            output = np.asarray([output], dtype=np.float32).reshape(*oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "activations.hpp"']
+        self.code_gen_dict["$GLOBALS$"] += ['#include "params.h"']
+
+    # TODO check and add whatever is missing
+    def defines(self, var):
+        numInputVectors = list(self.get_nodeattr("numInputVectors"))
+        numReps = numInputVectors[0]
+        self.code_gen_dict["$DEFINES$"] = [
+            """#define NumChannels1 {}\n#define PE1 {}\n#define numReps {}""".format(
+                self.get_nodeattr("NumChannels"), self.get_nodeattr("PE"), numReps,
+            )
+        ]
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+        # note: the innermost dim is reversed for the input
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0, false);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        tmpl_args = self.get_template_param_values()
+        # TODO: why put some template parameters into defines and not others?
+        # should ImgDim be defined or just filled in here like we do now?
+        ishape = self.get_folded_input_shape()
+        if len(ishape) == 3:
+            imgdim = 1
+        elif len(ishape) == 5:
+            imgdim = ishape[1]
+        else:
+            raise Exception("""Unexpeted input shape""")
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """Thresholding_Batch<{}, NumChannels1, PE1, {}, {}>
+            (in0, out, threshs, numReps);""".format(
+                imgdim, tmpl_args["TSrcI"], tmpl_args["TDstI"],
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        if dtype == DataType.BIPOLAR:
+            # use binary for bipolar storage
+            dtype = DataType.BINARY
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        shape = self.get_folded_output_shape()
+        shape_cpp_str = str(shape).replace("(", "{").replace(")", "}")
+
+        # note: the innermost dim is not reversed for the output
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s", false);'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                shape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            """void {}(hls::stream<ap_uint<{}>> &in0,
+                hls::stream<ap_uint<{}>> &out
+                )""".format(
+                self.onnx_node.name,
+                self.get_instream_width(),
+                self.get_outstream_width(),
+            )
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
+
+        # the channelwise parameter tensor is acc_type [PE][TMEM][N_PARAMS_PER_CHANNEL]
+        # partition for parallel access along PE and N_PARAMS_PER_CHANNEL
+        # dimensions (dims 1 and 3)
+        self.code_gen_dict["$PRAGMAS$"].append(
+            (
+                "#pragma HLS ARRAY_PARTITION variable=threshs.parameters "
+                "complete dim=1"
+            )
+        )
+        # self.code_gen_dict["$PRAGMAS$"].append(
+        #     (
+        #         "#pragma HLS ARRAY_PARTITION variable=threshs.parameters "
+        #         "complete dim=3"
+        #     )
+        # )
+
+        # set resource type
+        ram_style = self.get_nodeattr("ram_style")
+        pe = self.get_nodeattr("PE")
+        ich = self.get_nodeattr("NumChannels")
+        # if PE is less than NumChannels, assign cores according to ram_style;
+        # otherwise if PE == NumChannels, Vivado HLS will unroll to FFs
+        if pe < ich:
+            if ram_style == "distributed":
+                self.code_gen_dict["$PRAGMAS$"].append(
+                    (
+                        "#pragma HLS RESOURCE variable=threshs.parameters "
+                        "core=ROM_2P_LUTRAM"
+                    )
+                )
+            elif ram_style == "block":
+                self.code_gen_dict["$PRAGMAS$"].append(
+                    (
+                        "#pragma HLS RESOURCE variable=threshs.parameters "
+                        "core=ROM_2P_BRAM"
+                    )
+                )
+            else:
+                raise Exception(
+                    """Invalid value for attribute ram_style! Is currently set to: {}
+                has to be set to one of ("block", "distributed")""".format(
+                        ram_style
+                    )
+                )
diff --git a/src/finn/custom_op/fpgadataflow/convolutioninputgenerator.py b/src/finn/custom_op/fpgadataflow/convolutioninputgenerator.py
index e4d106068d4d128c66b2ce5f3d6c925dfe414b90..d33d6c963c0c55309f7f258c9ec1d7723e112282 100644
--- a/src/finn/custom_op/fpgadataflow/convolutioninputgenerator.py
+++ b/src/finn/custom_op/fpgadataflow/convolutioninputgenerator.py
@@ -41,10 +41,19 @@ from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy
 # output 0 is the output tensor, shape NHWC:
 #     = (1, OFMDim, OFMDim, (ConvKernelDim^2)*IFMChannels)
 
+# note: the actual data layout produced by the hlslib kernels is different
+# for depthwise and non-depthwise ops.
+# * non-depthwise SWG: (1, OFMDim, OFMDim, K, K, IFMChannels/SIMD, SIMD)
+# * depthwise SWG: (1, OFMDim, OFMDim, IFMChannels/SIMD, K, K, SIMD)
+# see test_fpgadataflow_slidingwindow.py for an example of how to transform
+# between the two layouts
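+# a rough sketch of the transform (non-depthwise -> depthwise layout), assuming
+# a numpy array x holding the SWG output and hypothetical variable names:
+#   x = x.reshape(1, ofm_dim, ofm_dim, k, k, ifm_ch // simd, simd)
+#   x = x.transpose(0, 1, 2, 5, 3, 4, 6)  # move IFMChannels/SIMD in front of K, K
+#   x = x.reshape(1, ofm_dim, ofm_dim, k * k * ifm_ch)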
+
 
 class ConvolutionInputGenerator(HLSCustomOp):
-    """Class that corresponds to finn-hlslib ConvolutionInputGenerator
-    (sliding window) function."""
+    """Class that corresponds to one of the finn-hlslib ConvolutionInputGenerator
+    (sliding window) function variants. Depending on the combination of
+    attributes (e.g. depthwise or not, whether k % stride is 0) a different
+    variant will be picked for the actual HLS implementation."""
 
     def __init__(self, onnx_node):
         super().__init__(onnx_node)
@@ -60,6 +69,7 @@ class ConvolutionInputGenerator(HLSCustomOp):
             # FINN DataTypes for inputs, weights, outputs
             "inputDataType": ("s", True, ""),
             "outputDataType": ("s", True, ""),
+            "depthwise": ("i", False, 0),
             # FPGA resource type for ConvolutionInputGenerator input buffer
             # auto -- let Vivado HLS decide
             # block -- use BRAM
@@ -106,7 +116,6 @@ class ConvolutionInputGenerator(HLSCustomOp):
         pad = 0
         ofm_dim = compute_conv_output_dim(ifm_dim, k, stride, pad)
         assert ifm_ch % simd == 0, "SIMD must divide IFMChannels"
-        assert k % stride == 0, "stride must divide kernel size k"
         wf = int((k * k * ifm_ch) // simd)
         folded_oshape = (1, ofm_dim, ofm_dim, wf, simd)
         return folded_oshape
@@ -168,6 +177,23 @@ class ConvolutionInputGenerator(HLSCustomOp):
         num_output_elems = np.prod(folded_oshape[:-1])
         return num_output_elems
 
+    def get_exp_cycles(self):
+        simd = self.get_nodeattr("SIMD")
+        ifm_ch = self.get_nodeattr("IFMChannels")
+        k = self.get_nodeattr("ConvKernelDim")
+        ifm_dim = self.get_nodeattr("IFMDim")
+        ofm_dim = self.get_nodeattr("OFMDim")
+        stride = self.get_nodeattr("Stride")
+        # since mmv != 1 is not supported yet, we set mmv for now to 1
+        mmv = 1
+        # see https://github.com/Xilinx/finn-hlslib/blob/master/slidingwindow.h
+        cycles_write_block = (ofm_dim * k * k * (ifm_ch / simd)) / mmv
+        cycles_read_block = stride * ifm_dim * (ifm_ch / simd)
+        max_cycles = max(cycles_write_block, cycles_read_block)
+        exp_cycles = ifm_dim * k * (ifm_ch / simd) + ofm_dim * max_cycles
+
+        return int(exp_cycles)
+
     def execute_node(self, context, graph):
         mode = self.get_nodeattr("exec_mode")
         node = self.onnx_node
@@ -305,12 +331,35 @@ class ConvolutionInputGenerator(HLSCustomOp):
 
     def docompute(self):
         node = self.onnx_node
-        self.code_gen_dict["$DOCOMPUTE$"] = [
-            """{}<ConvKernelDim1, IFMChannels1, Input_precision1, IFMDim1,
-                OFMDim1, SIMD1, Stride1> (in0, out, numReps);""".format(
-                node.op_type
-            )
-        ]
+        ram_style = self.get_nodeattr("ram_style")
+        map_to_hls_ram_style = {
+            "auto": "ap_resource_dflt()",
+            "block": "ap_resource_bram()",
+            "distributed": "ap_resource_lutram()",
+            "ultra": "ap_resource_uram()",
+        }
+        hls_ram_style = map_to_hls_ram_style[ram_style]
+        hls_call = node.op_type
+        # check if the non-optimized ConvolutionInputGenerator variant is needed
+        k = self.get_nodeattr("ConvKernelDim")
+        stride = self.get_nodeattr("Stride")
+        if k % stride != 0:
+            hls_call += "_kernel_stride"
+
+        if self.get_nodeattr("depthwise") == 1:
+            self.code_gen_dict["$DOCOMPUTE$"] = [
+                """{}_dws<ConvKernelDim1, IFMChannels1, Input_precision1, IFMDim1,
+                    OFMDim1, SIMD1, Stride1> (in0, out, numReps, {});""".format(
+                    hls_call, hls_ram_style
+                )
+            ]
+        else:
+            self.code_gen_dict["$DOCOMPUTE$"] = [
+                """{}<ConvKernelDim1, IFMChannels1, Input_precision1, IFMDim1,
+                    OFMDim1, SIMD1, Stride1> (in0, out, numReps, {});""".format(
+                    hls_call, hls_ram_style
+                )
+            ]
 
     def dataoutstrm(self):
         code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
@@ -356,17 +405,3 @@ class ConvolutionInputGenerator(HLSCustomOp):
         self.code_gen_dict["$PRAGMAS$"].append(
             "#pragma HLS INTERFACE ap_ctrl_none port=return"
         )
-
-    def ipgen_extra_directives(self):
-        # add directive to control input buffer memory resources
-        ram_style = self.get_nodeattr("ram_style")
-        map_to_hls_ram_style = {
-            "auto": "RAM_2P",
-            "block": "RAM_2P_BRAM",
-            "distributed": "RAM_2P_LUTRAM",
-            "ultra": "RAM_2P_URAM",
-        }
-        hls_ram_style = map_to_hls_ram_style[ram_style]
-        directive = "set_directive_resource -core %s " % hls_ram_style
-        directive += "ConvolutionInputGenerator inputBuf"
-        return [directive]
diff --git a/src/finn/custom_op/fpgadataflow/downsampler.py b/src/finn/custom_op/fpgadataflow/downsampler.py
new file mode 100644
index 0000000000000000000000000000000000000000..15d55653b4e431dead885d75650b1500150d8775
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/downsampler.py
@@ -0,0 +1,305 @@
+import os
+import numpy as np
+from onnx import TensorProto, helper
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy
+
+
+class DownSampler(HLSCustomOp):
+    """Corresponds to finn-hlslib ConvolutionInputGenerator_kernel1 function.
+    Basically performs a downsampling of the image, removing rows and columns."""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            # spatial size of input images
+            "ImgDim": ("i", True, 0),
+            # number of channels in input image
+            "NumChannels": ("i", True, 0),
+            # Number of input columns computed in parallel
+            "SIMD": ("i", False, 1),
+            "Stride": ("i", True, 2),
+            # FINN input datatype
+            "inputDataType": ("s", True, ""),
+            # Batch size
+            "numInputVectors": ("i", False, 1),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def get_downsampled_odim(self):
+        "Return the down sampled spatial size of the output."
+        idim = self.get_nodeattr("ImgDim")
+        stride = self.get_nodeattr("Stride")
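+        # e.g. ImgDim=5, Stride=2 -> floor((5-1)/2)+1 = 3 output rows/cols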
+        return int(np.floor((idim - 1) / stride) + 1)
+
+    def get_exp_cycles(self):
+        idim = self.get_nodeattr("ImgDim")
+        channels = self.get_nodeattr("NumChannels")
+        simd = self.get_nodeattr("SIMD")
+        batch_size = self.get_nodeattr("numInputVectors")
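+        # one SIMD-wide channel group is consumed per cycle over the full input image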
+        exp_cycles = channels / simd * batch_size * idim * idim
+        return int(exp_cycles)
+
+    def get_normal_input_shape(self):
+        idim = self.get_nodeattr("ImgDim")
+        num_ch = self.get_nodeattr("NumChannels")
+        batch = self.get_nodeattr("numInputVectors")
+        ishape = (batch, idim, idim, num_ch)
+        return ishape
+
+    def get_normal_output_shape(self):
+        odim = self.get_downsampled_odim()
+        num_ch = self.get_nodeattr("NumChannels")
+        batch = self.get_nodeattr("numInputVectors")
+        oshape = (batch, odim, odim, num_ch)
+        return oshape
+
+    def get_folded_input_shape(self):
+        normal_ishape = list(self.get_normal_input_shape())
+        ifm_ch = self.get_nodeattr("NumChannels")
+        simd = self.get_nodeattr("SIMD")
+        assert ifm_ch % simd == 0, "SIMD must divide input channels"
+        fold = int(normal_ishape[-1] / simd)
+        folded_ishape = normal_ishape[:-1] + [fold, simd]
+        return tuple(folded_ishape)
+
+    def get_folded_output_shape(self):
+        normal_oshape = list(self.get_normal_output_shape())
+        ifm_ch = self.get_nodeattr("NumChannels")
+        simd = self.get_nodeattr("SIMD")
+        assert ifm_ch % simd == 0, "SIMD must divide input channels"
+        fold = int(normal_oshape[-1] / simd)
+        folded_oshape = normal_oshape[:-1] + [fold, simd]
+        return tuple(folded_oshape)
+
+    def make_shape_compatible_op(self, model):
+        exp_ishape = self.get_normal_input_shape()
+        oshape = self.get_normal_output_shape()
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[0]))
+        assert ishape == exp_ishape, "Unexpect input shape for DownSampler."
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten().astype(float),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # data type stays the same
+        dtype = model.get_tensor_datatype(node.input[0])
+        exp_idtype = self.get_input_datatype()
+        assert dtype == exp_idtype, "Unexpected datatype for DownSampler"
+        model.set_tensor_datatype(node.output[0], dtype)
+
+    def verify_node(self):
+        pass
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        ret = DataType[self.get_nodeattr("inputDataType")]
+        return ret
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output. (Same as input datatype)"""
+        return self.get_input_datatype()
+
+    def get_instream_width(self):
+        ibits = self.get_input_datatype().bitwidth()
+        simd = self.get_nodeattr("SIMD")
+        return ibits * simd
+
+    def get_outstream_width(self):
+        obits = self.get_output_datatype().bitwidth()
+        simd = self.get_nodeattr("SIMD")
+        return obits * simd
+
+    def get_number_output_values(self):
+        folded_oshape = self.get_folded_output_shape()
+        return np.prod(folded_oshape[:-1])
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "slidingwindow.h"']
+
+    def defines(self, var):
+        self.code_gen_dict["$DEFINES$"] = []
+
+        ifm_ch = self.get_nodeattr("NumChannels")
+        self.code_gen_dict["$DEFINES$"] += ["#define IFMChannels {}".format(ifm_ch)]
+
+        ibits = self.get_input_datatype().bitwidth()
+        self.code_gen_dict["$DEFINES$"] += ["#define Input_precision {}".format(ibits)]
+
+        idim = self.get_nodeattr("ImgDim")
+        self.code_gen_dict["$DEFINES$"] += ["#define IFMDim {}".format(idim)]
+
+        simd = self.get_nodeattr("SIMD")
+        self.code_gen_dict["$DEFINES$"] += ["#define SIMD {}".format(simd)]
+
+        stride = self.get_nodeattr("Stride")
+        self.code_gen_dict["$DEFINES$"] += ["#define Stride {}".format(stride)]
+
+        batch_size = self.get_nodeattr("numInputVectors")
+        self.code_gen_dict["$DEFINES$"] += ["#define numReps {}".format(batch_size)]
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        if dtype == DataType.BIPOLAR:
+            # use binary for bipolar storage
+            dtype = DataType.BINARY
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """ConvolutionInputGenerator_kernel1<IFMChannels, Input_precision,
+            IFMDim, SIMD,Stride> (in0, out, numReps);"""
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        if dtype == DataType.BIPOLAR:
+            # use binary for bipolar storage
+            dtype = DataType.BINARY
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        oshape = self.get_folded_output_shape()
+        oshape_cpp_str = str(oshape).replace("(", "{").replace(")", "}")
+
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s");'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                oshape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            "void %s(hls::stream<%s > &in0, hls::stream<%s > &out)"
+            % (self.onnx_node.name, packed_hls_type, packed_hls_type)
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+        exp_ishape = self.get_normal_input_shape()
+        exp_oshape = self.get_normal_output_shape()
+        folded_ishape = self.get_folded_input_shape()
+        folded_oshape = self.get_folded_output_shape()
+
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        inp = context[node.input[0]]
+        assert str(inp.dtype) == "float32", "Input datatype is not float32"
+        assert (
+            inp.shape == exp_ishape
+        ), """Input shape doesn't
+        match expected shape (numInputVectors, ImgDim, ImgDim, NumChannels)."""
+        export_idt = self.get_input_datatype()
+
+        reshaped_input = inp.reshape(folded_ishape)
+        np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_input)
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            assert (
+                context[node.output[0]].shape == folded_oshape
+            ), "cppsim did not produce expected folded output shape"
+            context[node.output[0]] = context[node.output[0]].reshape(*exp_oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            rtlsim_inp = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            rtlsim_output = self.rtlsim(sim, rtlsim_inp)
+            odt = export_idt
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                rtlsim_output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+            # load and reshape output
+            output = np.load(out_npy_path)
+            output = np.asarray([output], dtype=np.float32).reshape(*exp_oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following value ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+        assert (
+            context[node.output[0]].shape == exp_oshape
+        ), """Output shape doesn't match expected shape
+            (1, OutputDim, OutputDim, NumChannels)."""
diff --git a/src/finn/custom_op/fpgadataflow/duplicatestreams_batch.py b/src/finn/custom_op/fpgadataflow/duplicatestreams_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..044cfddaab51a5f9bf7aa25e9123247b10de8529
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/duplicatestreams_batch.py
@@ -0,0 +1,379 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+
+import numpy as np
+
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from onnx import helper, TensorProto
+from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy
+
+
+class DuplicateStreams_Batch(HLSCustomOp):
+    """Class that corresponds to finn-hlslib function of the same name."""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            "NumChannels": ("i", True, 0),
+            "PE": ("i", True, 0),
+            # FINN DataTypes for input
+            "inputDataType": ("s", True, ""),
+            # number of input vectors, examples:
+            # [1] is a single vector (like a FC layer with batch=1)
+            # [4] is four vectors (like a FC layer with batch=4)
+            # [1, 4, 4] is four * four vectors (like a conv layer with batch=1)
+            "numInputVectors": ("ints", False, [1]),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def get_normal_input_shape(self):
+        ch = self.get_nodeattr("NumChannels")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        ishape = tuple(vecs + [ch])
+        return ishape
+
+    def get_folded_input_shape(self):
+        ch = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        assert ch % pe == 0, "PE must divide NumChannels"
+        folds = int(ch / pe)
+        folded_ishape = tuple(vecs + [folds, pe])
+        return folded_ishape
+
+    def get_normal_output_shape(self):
+        return self.get_normal_input_shape()
+
+    def get_folded_output_shape(self):
+        return self.get_folded_input_shape()
+
+    def make_shape_compatible_op(self, model):
+        exp_ishape = self.get_normal_input_shape()
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[0]))
+        assert ishape == exp_ishape, "Unexpected input shape."
+
+        oshape = self.get_normal_output_shape()
+        values = np.zeros(oshape).astype(np.float32)
+        split_input = np.concatenate((values, values), axis=0)
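+        # stacking two copies of a zero tensor lets an ONNX Split (axis=0) produce
+        # two outputs, each with the expected output shape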
+
+        split_in = helper.make_tensor_value_info(
+            model.make_new_valueinfo_name(), TensorProto.FLOAT, oshape
+        )
+
+        model.graph.value_info.append(split_in)  # requires clean up
+        model.set_initializer(split_in.name, split_input)
+
+        shape_comp_node = helper.make_node(
+            "Split",
+            inputs=[split_in.name],
+            outputs=[self.onnx_node.output[0], self.onnx_node.output[1]],
+            axis=0,
+        )
+
+        return shape_comp_node
+
+    def infer_node_datatype(self, model):
+        odt = self.get_output_datatype()
+        model.set_tensor_datatype(self.onnx_node.output[0], odt)
+        model.set_tensor_datatype(self.onnx_node.output[1], odt)
+
+    def verify_node(self):
+        info_messages = []
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+
+        # verify that "backend" is set to "fpgadataflow"
+        backend_value = self.get_nodeattr("backend")
+        if backend_value == "fpgadataflow":
+            info_messages.append("Attribute backend is set correctly")
+        else:
+            info_messages.append('Attribute backend should be set to "fpgadataflow"')
+
+        # verify that all necessary attributes exist
+        try:
+            self.get_nodeattr("code_gen_dir_cppsim")
+            self.get_nodeattr("executable_path")
+            self.get_nodeattr("NumChannels")
+            self.get_nodeattr("PE")
+            self.get_nodeattr("inputDataType")
+            info_messages.append("All necessary attributes exist")
+        except Exception:
+            info_messages.append(
+                """The required GlobalAccPool_Batch attributes do not exist."""
+            )
+
+        return info_messages
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        return DataType[self.get_nodeattr("inputDataType")]
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output."""
+        return DataType[self.get_nodeattr("inputDataType")]
+
+    def get_instream_width(self):
+        """Returns input stream width."""
+        ibits = self.get_input_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        in_width = pe * ibits
+        return in_width
+
+    def get_outstream_width(self):
+        """Returns output stream width."""
+        obits = self.get_output_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        out_width = pe * obits
+        return out_width
+
+    def get_number_output_values(self):
+        return 2 * np.prod(self.get_folded_output_shape()[1:-1])
+
+    def get_exp_cycles(self):
+        # Channels/PE * batch size * fmdim * fmdim
+        return np.prod(self.get_folded_output_shape()[:-1])
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+        exp_ishape = self.get_normal_input_shape()
+        exp_oshape = self.get_normal_output_shape()
+        folded_ishape = self.get_folded_input_shape()
+        folded_oshape = self.get_folded_output_shape()
+
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        inp = context[node.input[0]]
+        assert str(inp.dtype) == "float32", "Input datatype is not float32"
+        assert inp.shape == exp_ishape, """Input shape doesn't match expected shape."""
+        export_idt = self.get_input_datatype()
+        # reshape input into folded form
+        inp = inp.reshape(folded_ishape)
+        # make copy before saving array
+        reshaped_input = inp.copy()
+        np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_input)
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_outputs(context, ["output0.npy", "output1.npy"])
+            assert (
+                context[node.output[0]].shape == folded_oshape
+            ), "cppsim \
+            did not produce expected folded output shape"
+            assert (
+                context[node.output[1]].shape == folded_oshape
+            ), "cppsim \
+            did not produce expected folded output shape"
+            context[node.output[0]] = context[node.output[0]].reshape(*exp_oshape)
+            context[node.output[1]] = context[node.output[1]].reshape(*exp_oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            rtlsim_inp = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            rtlsim_dict = {
+                "inputs": {"in0": rtlsim_inp},
+                "outputs": {"out0": [], "out1": []},
+            }
+            self.rtlsim_multi_io(sim, rtlsim_dict)
+            odt = self.get_output_datatype()
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_shape = self.get_folded_output_shape()
+
+            out_npy_path = "{}/output0.npy".format(code_gen_dir)
+            rtlsim_output_to_npy(
+                rtlsim_dict["outputs"]["out0"],
+                out_npy_path,
+                odt,
+                out_shape,
+                packed_bits,
+                target_bits,
+            )
+            # load and reshape output 0
+            output = np.load(out_npy_path)
+            output = np.asarray([output], dtype=np.float32).reshape(*exp_oshape)
+            context[node.output[0]] = output
+
+            out_npy_path = "{}/output1.npy".format(code_gen_dir)
+            rtlsim_output_to_npy(
+                rtlsim_dict["outputs"]["out1"],
+                out_npy_path,
+                odt,
+                out_shape,
+                packed_bits,
+                target_bits,
+            )
+            # load and reshape output 1
+            output = np.load(out_npy_path)
+            output = np.asarray([output], dtype=np.float32).reshape(*exp_oshape)
+            context[node.output[1]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        assert (
+            context[node.output[0]].shape == exp_oshape
+        ), """Output0 shape doesn't match expected shape."""
+        assert (
+            context[node.output[1]].shape == exp_oshape
+        ), """Output1 shape doesn't match expected shape."""
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "streamtools.h"']
+
+    def defines(self, var):
+        self.code_gen_dict["$DEFINES$"] = []
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out0 ("out0");'.format(self.get_outstream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out1 ("out1");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """DuplicateStreams_Batch<{}, {}> (in0, out0, out1, 1);""".format(
+                self.get_outstream_width(), self.get_number_output_values() // 2,
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output0.npy" % code_gen_dir
+        npy_out1 = "%s/output1.npy" % code_gen_dir
+        oshape = self.get_folded_output_shape()
+        oshape_cpp_str = str(oshape).replace("(", "{").replace(")", "}")
+
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out0, %s, "%s");'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                oshape_cpp_str,
+                npy_out,
+            )
+        ]
+
+        self.code_gen_dict["$DATAOUTSTREAM$"] += [
+            'apintstream2npy<%s, %s, %d, %s>(out1, %s, "%s");'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                oshape_cpp_str,
+                npy_out1,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            """void {}(hls::stream<ap_uint<{}>> &in0,
+                hls::stream<ap_uint<{}>> &out0,
+                hls::stream<ap_uint<{}>> &out1)""".format(
+                self.onnx_node.name,
+                self.get_instream_width(),
+                self.get_outstream_width(),
+                self.get_outstream_width(),
+            )
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out0")
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out1")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
+
+    def get_verilog_top_module_intf_names(self):
+        intf_names = super().get_verilog_top_module_intf_names()
+        intf_names["m_axis"] = ["out0_V_V", "out1_V_V"]
+        return intf_names
diff --git a/src/finn/custom_op/fpgadataflow/fmpadding_batch.py b/src/finn/custom_op/fpgadataflow/fmpadding_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9a9dc4340b18578550a9c453d90de86234d1cad
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/fmpadding_batch.py
@@ -0,0 +1,313 @@
+import os
+import numpy as np
+from onnx import TensorProto, helper
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy
+
+
+class FMPadding_Batch(HLSCustomOp):
+    """Corresponds to finn-hlslib FMPadding_Batch function.
+    Pads input image by given amount."""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            # spatial size of input images
+            "ImgDim": ("i", True, 0),
+            # total padding (per dimension) to apply
+            "Padding": ("i", True, 2),
+            # number of channels in input image
+            "NumChannels": ("i", True, 0),
+            # SIMD Input parallelism
+            "SIMD": ("i", False, 1),
+            # FINN input datatype
+            "inputDataType": ("s", True, ""),
+            # controls distribution of padded pixels
+            # in case of uneven padding -- see FMPadding fxn
+            # in hlslib
+            "PaddingStyle": ("i", False, 2),
+            # shape describing input vecs per execution
+            "numInputVectors": ("i", False, 1),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def get_padded_odim(self):
+        "Return the padded spatial size of the output."
+
+        idim = self.get_nodeattr("ImgDim")
+        pad = self.get_nodeattr("Padding")
+        return idim + pad
+
+    def get_exp_cycles(self):
+        odim = self.get_padded_odim()
+        channels = self.get_nodeattr("NumChannels")
+        simd = self.get_nodeattr("SIMD")
+        batch_size = self.get_nodeattr("numInputVectors")
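+        # one SIMD-wide channel group per cycle over the padded output image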
+        exp_cycles = (channels / simd) * batch_size * odim * odim
+        return exp_cycles
+
+    def get_normal_input_shape(self):
+        idim = self.get_nodeattr("ImgDim")
+        num_ch = self.get_nodeattr("NumChannels")
+
+        ishape = (1, idim, idim, num_ch)
+        return ishape
+
+    def get_normal_output_shape(self):
+        odim = self.get_padded_odim()
+        num_ch = self.get_nodeattr("NumChannels")
+
+        oshape = (1, odim, odim, num_ch)
+        return oshape
+
+    def get_folded_input_shape(self):
+        normal_ishape = list(self.get_normal_input_shape())
+        ifm_ch = self.get_nodeattr("NumChannels")
+        simd = self.get_nodeattr("SIMD")
+        assert ifm_ch % simd == 0, "SIMD must divide input channels"
+        fold = int(normal_ishape[-1] / simd)
+        folded_ishape = normal_ishape[:-1] + [fold, simd]
+        return tuple(folded_ishape)
+
+    def get_folded_output_shape(self):
+        normal_oshape = list(self.get_normal_output_shape())
+        ifm_ch = self.get_nodeattr("NumChannels")
+        simd = self.get_nodeattr("SIMD")
+        assert ifm_ch % simd == 0, "SIMD must divide input channels"
+        fold = int(normal_oshape[-1] / simd)
+        folded_oshape = normal_oshape[:-1] + [fold, simd]
+        return tuple(folded_oshape)
+
+    def make_shape_compatible_op(self, model):
+        exp_ishape = self.get_normal_input_shape()
+        oshape = self.get_normal_output_shape()
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[0]))
+        assert ishape == exp_ishape, "Unexpect input shape for SameResize."
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten().astype(float),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # data type stays the same
+        dtype = model.get_tensor_datatype(node.input[0])
+        exp_idtype = self.get_input_datatype()
+        assert dtype == exp_idtype, "Unexpected datatype for FMPadding_Batch"
+        model.set_tensor_datatype(node.output[0], dtype)
+
+    def verify_node(self):
+        pass
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        ret = DataType[self.get_nodeattr("inputDataType")]
+        # the hlslib op always pads with zeros, so ensure that the DataType
+        # is able to represent zeros
+        assert ret.allowed(0), "FMPadding_Batch DataType must support zero"
+        return ret
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output. (Same as input datatype)"""
+        return self.get_input_datatype()
+
+    def get_instream_width(self):
+        ibits = self.get_input_datatype().bitwidth()
+        simd = self.get_nodeattr("SIMD")
+        return ibits * simd
+
+    def get_outstream_width(self):
+        obits = self.get_output_datatype().bitwidth()
+        simd = self.get_nodeattr("SIMD")
+        return obits * simd
+
+    def get_number_output_values(self):
+        folded_oshape = self.get_folded_output_shape()
+        return np.prod(folded_oshape[:-1])
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "streamtools.h"']
+
+    def defines(self, var):
+        self.code_gen_dict["$DEFINES$"] = [
+            """#define ImgDim1 {}\n#define OutputDim1 {}\n
+            #define Padding1 {}\n#define NumChannels1 {}\n
+            #define PaddingStyle1 {}\n#define numReps {}
+            #define SIMD1 {}\n""".format(
+                self.get_nodeattr("ImgDim"),
+                self.get_padded_odim(),
+                self.get_nodeattr("Padding"),
+                self.get_nodeattr("NumChannels"),
+                self.get_nodeattr("PaddingStyle"),
+                self.get_nodeattr("numInputVectors"),
+                self.get_nodeattr("SIMD"),
+            )
+        ]
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        if dtype == DataType.BIPOLAR:
+            # use binary for bipolar storage
+            dtype = DataType.BINARY
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        in_t = self.get_input_datatype().get_hls_datatype_str()
+        node = self.onnx_node
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """{}<ImgDim1, OutputDim1, Padding1, NumChannels1,SIMD1,
+            {}, PaddingStyle1> (in0, out, numReps);""".format(
+                node.op_type, in_t
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        if dtype == DataType.BIPOLAR:
+            # use binary for bipolar storage
+            dtype = DataType.BINARY
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        oshape = self.get_folded_output_shape()
+        oshape_cpp_str = str(oshape).replace("(", "{").replace(")", "}")
+
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s");'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                oshape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            "void %s(hls::stream<%s > &in0, hls::stream<%s > &out)"
+            % (self.onnx_node.name, packed_hls_type, packed_hls_type)
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+        exp_ishape = self.get_normal_input_shape()
+        exp_oshape = self.get_normal_output_shape()
+        folded_ishape = self.get_folded_input_shape()
+        folded_oshape = self.get_folded_output_shape()
+
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        inp = context[node.input[0]]
+        assert str(inp.dtype) == "float32", "Input datatype is not float32"
+        assert (
+            inp.shape == exp_ishape
+        ), """Input shape doesn't
+        match expected shape (1, ImgDim, ImgDim, NumChannels)."""
+        export_idt = self.get_input_datatype()
+
+        reshaped_input = inp.reshape(folded_ishape)
+        np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_input)
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            assert (
+                context[node.output[0]].shape == folded_oshape
+            ), "cppsim did not produce expected folded output shape"
+            context[node.output[0]] = context[node.output[0]].reshape(*exp_oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            rtlsim_inp = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            rtlsim_output = self.rtlsim(sim, rtlsim_inp)
+            odt = export_idt
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                rtlsim_output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+            # load and reshape output
+            output = np.load(out_npy_path)
+            output = np.asarray([output], dtype=np.float32).reshape(*exp_oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+        assert (
+            context[node.output[0]].shape == exp_oshape
+        ), """Output shape doesn't match expected shape
+            (1, OutputDim, OutputDim, NumChannels)."""
diff --git a/src/finn/custom_op/fpgadataflow/globalaccpool_batch.py b/src/finn/custom_op/fpgadataflow/globalaccpool_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a75858880a072345ef942ca91feabf0bec9ab36
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/globalaccpool_batch.py
@@ -0,0 +1,348 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+
+import numpy as np
+
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from onnx import TensorProto, helper
+from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy
+
+
+class GlobalAccPool_Batch(HLSCustomOp):
+    """Class that corresponds to finn-hlslib AccPool_Batch function."""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            "NumChannels": ("i", True, 0),
+            "PE": ("i", True, 0),
+            # FINN DataTypes for input
+            "inputDataType": ("s", True, ""),
+            # number of input vectors, examples:
+            # [1] is a single vector (like a FC layer with batch=1)
+            # [4] is four vectors (like a FC layer with batch=4)
+            # [1, 4, 4] is four * four vectors (like a conv layer with batch=1)
+            "numInputVectors": ("ints", False, [1]),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def get_normal_input_shape(self):
+        ch = self.get_nodeattr("NumChannels")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        ishape = tuple(vecs + [ch])
+        return ishape
+
+    def get_folded_input_shape(self):
+        ch = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        assert ch % pe == 0, "PE must divide NumChannels"
+        folds = int(ch / pe)
+        folded_ishape = tuple(vecs + [folds, pe])
+        return folded_ishape
+
+    def get_normal_output_shape(self):
+        ch = self.get_nodeattr("NumChannels")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        if len(vecs) == 1:
+            oshape = tuple(vecs + [ch])
+        elif len(vecs) == 3:
+            oshape = tuple([vecs[0]] + [1, 1, ch])
+        return oshape
+
+    def get_folded_output_shape(self):
+        ch = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        unfolded_shape = list(self.get_normal_output_shape())
+        assert ch % pe == 0, "PE must divide NumChannels"
+        folds = int(ch / pe)
+        oshape = tuple(unfolded_shape[:-1] + [folds, pe])
+        return oshape
+
+    def make_shape_compatible_op(self, model):
+        exp_ishape = self.get_normal_input_shape()
+        oshape = self.get_normal_output_shape()
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[0]))
+        assert ishape == exp_ishape, "Unexpected input shape."
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten(),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        odt = self.get_output_datatype()
+        model.set_tensor_datatype(self.onnx_node.output[0], odt)
+
+    def verify_node(self):
+        info_messages = []
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+
+        # verify that "backend" is set to "fpgadataflow"
+        backend_value = self.get_nodeattr("backend")
+        if backend_value == "fpgadataflow":
+            info_messages.append("Attribute backend is set correctly")
+        else:
+            info_messages.append('Attribute backend should be set to "fpgadataflow"')
+
+        # verify that all necessary attributes exist
+        try:
+            self.get_nodeattr("code_gen_dir_cppsim")
+            self.get_nodeattr("executable_path")
+            self.get_nodeattr("NumChannels")
+            self.get_nodeattr("PE")
+            self.get_nodeattr("inputDataType")
+            info_messages.append("All necessary attributes exist")
+        except Exception:
+            info_messages.append(
+                """The required GlobalAccPool_Batch attributes do not exist."""
+            )
+
+        # verify that input data is 2D
+        if len(self.get_nodeattr("numInputVectors")) != 3:
+            info_messages.append("""GlobalAccPool_Batch requires 2D data input.""")
+            raise Exception
+
+        return info_messages
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        return DataType[self.get_nodeattr("inputDataType")]
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output."""
+        # determine data type from image size and input type
+        idt = DataType[self.get_nodeattr("inputDataType")]
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        npixels = vecs[-1] * vecs[-2]
+        if idt.signed():
+            extreme_value = npixels * idt.min()
+        else:
+            extreme_value = npixels * idt.max()
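+        # worked example with assumed values (not taken from a real model):
+        # a 4x4 feature map of INT4 inputs (min = -8) can accumulate down to
+        # 16 * (-8) = -128, so the smallest possible output type is INT8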
+        return DataType.get_smallest_possible(extreme_value)
+
+    def get_instream_width(self):
+        """Returns input stream width."""
+        ibits = self.get_input_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        in_width = pe * ibits
+        return in_width
+
+    def get_outstream_width(self):
+        """Returns output stream width."""
+        obits = self.get_output_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        out_width = pe * obits
+        return out_width
+
+    def get_number_output_values(self):
+        return np.prod(self.get_folded_output_shape()[1:-1])
+
+    def get_exp_cycles(self):
+        # Channels/PE * batch size * idim * idim + Channels/PE
+        ch = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        folds = int(ch / pe)
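+        # sketch with assumed values: NumChannels=64, PE=8 and
+        # numInputVectors=[1, 4, 4] give a folded input shape of (1, 4, 4, 8, 8),
+        # so the estimate is 1 * 4 * 4 * 8 + 8 = 136 cycles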
+        return np.prod(self.get_folded_input_shape()[:-1]) + folds
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+        exp_ishape = self.get_normal_input_shape()
+        exp_oshape = self.get_normal_output_shape()
+        folded_ishape = self.get_folded_input_shape()
+        folded_oshape = self.get_folded_output_shape()
+
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        inp = context[node.input[0]]
+        assert str(inp.dtype) == "float32", "Input datatype is not float32"
+        assert inp.shape == exp_ishape, """Input shape doesn't match expected shape."""
+        export_idt = self.get_input_datatype()
+        # reshape input into folded form
+        inp = inp.reshape(folded_ishape)
+        # make copy before saving array
+        reshaped_input = inp.copy()
+        np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_input)
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            assert (
+                context[node.output[0]].shape == folded_oshape
+            ), "cppsim \
+            did not produce expected ofolded utput shape"
+            context[node.output[0]] = context[node.output[0]].reshape(*exp_oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            rtlsim_inp = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            rtlsim_output = self.rtlsim(sim, rtlsim_inp)
+            odt = self.get_output_datatype()
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                rtlsim_output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+            # load and reshape output
+            output = np.load(out_npy_path)
+            output = np.asarray([output], dtype=np.float32).reshape(*exp_oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        assert (
+            context[node.output[0]].shape == exp_oshape
+        ), """Output shape doesn't match expected shape."""
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "maxpool.h"']
+
+    def defines(self, var):
+        self.code_gen_dict["$DEFINES$"] = []
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """AccPool_Batch<{}, {}, {}, {}, {}> (in0, out, 1);""".format(
+                self.get_normal_input_shape()[1],
+                self.get_nodeattr("NumChannels"),
+                self.get_input_datatype().get_hls_datatype_str(),
+                self.get_nodeattr("PE"),
+                self.get_output_datatype().get_hls_datatype_str(),
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        oshape = self.get_folded_output_shape()
+        oshape_cpp_str = str(oshape).replace("(", "{").replace(")", "}")
+
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s");'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                oshape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            """void {}(hls::stream<ap_uint<{}>> &in0,
+                hls::stream<ap_uint<{}>> &out)""".format(
+                self.onnx_node.name,
+                self.get_instream_width(),
+                self.get_outstream_width(),
+            )
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
diff --git a/src/finn/custom_op/fpgadataflow/iodma.py b/src/finn/custom_op/fpgadataflow/iodma.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d0374445d816f1e8d49ed92cf7aa67b024f9ac1
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/iodma.py
@@ -0,0 +1,360 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import numpy as np
+import math
+from onnx import TensorProto, helper
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+
+
+# the IODMA bridges a memory-mapped AXI interface and an AXI stream
+# direction "in": pulls data from AXI-MM to AXI stream
+# direction "out": pushes data from AXI stream to AXI-MM
+
+# DMA Addressing
+# - burst mode can be "wrap" or "increment"
+# - "increment" bursts will increment the address when moving to the next image
+# - "wrap" bursts will reinitialize the address to the start address,
+#   and are useful for e.g. streaming weights, where the same buffer is
+#   repeatedly read into the FPGA
+# - no additional alignment restrictions beyond anything specified in the AXI spec
+
+# Interfaces
+# - the AXI-MM interface name is given by intfName unless it is "" (empty, the
+#   default), in which case the output AXI-MM interface is named "out" and the
+#   input AXI-MM interface is named "in0"
+# - AXI-MM interface width (in bits) is specified by intfWidth
+# - AXI-Stream interface width (in bits) is specified by streamWidth
+# - If intfWidth and streamWidth are not equal, the DMA core performs
+#   width conversion by going up to the least common multiple of the two bitwidths,
+#   e.g. intfWidth=32b -> 96b -> streamWidth=24b
+# - transfers occur in multiples of the AXI-MM interface width, therefore
+#   the total number of bits in the tensor must be a multiple of intfWidth
+# - transfers occur in multiples of the AXI-Stream interface width, therefore
+#   the total number of bits in the tensor must be a multiple of streamWidth
+# - both interface widths must be a multiple of 8b (AXI protocol requirement)
+# - in most systems, intfWidth is also restricted to a power of 2 (e.g. Vitis)
+#   but this is not universal so we don't check here explicitly
+
+# Input/output tensor shapes
+# - The data being moved is a tensor of shape numInputVectors+[NumChannels]
+# - The data type of the tensor elements is specified by dataType
+# - on the stream side
+#       - the normal shape is the same as the ONNX tensor attached to it
+#       - the folded shape is computed from the stream width and normal shape
+# - on the AXI-MM side
+#       - the normal shape is the same as the one on the stream side
+#       - the folded shape is not defined
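+#
+# Worked example of the width conversion (a sketch with assumed widths, not a
+# requirement of any particular design): with intfWidth=32 and streamWidth=24,
+# lcm(32, 24) = 96, so an input DMA converts 32b words -> 96b -> 24b stream
+# words, mirroring what get_ap_int_max_w() computes:
+#
+#   import math
+#   intf_w, stream_w = 32, 24
+#   width_lcm = (intf_w * stream_w) // math.gcd(intf_w, stream_w)  # 96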
+
+
+class IODMA(HLSCustomOp):
+    """Class that corresponds to finn-hlslib DMA function(s)."""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            "NumChannels": ("i", True, 0),
+            # FINN input datatype
+            "dataType": ("s", True, ""),
+            # Stream parameters
+            "streamWidth": ("i", False, 32),
+            # DMA-specific parameters
+            "intfWidth": ("i", False, 32),
+            "burstMode": ("s", False, "increment"),
+            "direction": ("s", False, "in"),
+            # shape describing input vecs per execution
+            "numInputVectors": ("ints", False, [1]),
+            # name of axi-mm interface
+            "intfName": ("s", False, ""),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def get_normal_input_shape(self):
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        num_ch = self.get_nodeattr("NumChannels")
+        ishape = tuple(vecs + [num_ch])
+        return ishape
+
+    def get_normal_output_shape(self):
+        return self.get_normal_input_shape()
+
+    def get_folded_input_shape(self):
+        if self.get_nodeattr("direction") == "in":
+            raise ValueError("Folded input shape not defined for input IODMA")
+        else:
+            shape = list(self.get_normal_input_shape())
+            itype_bits = self.get_input_datatype().bitwidth()
+            intfw = self.get_nodeattr("streamWidth")
+            assert (
+                intfw % itype_bits == 0
+            ), "Input stream width must be a multiple of datatype bits"
+            elems_per_word = intfw // itype_bits
+            assert shape[-1] % elems_per_word == 0, "Fold depth must be integer"
+            fold_depth = shape[-1] // elems_per_word
+            shape[-1] = fold_depth
+            shape.append(elems_per_word)
+            return tuple(shape)
+
+    def get_folded_output_shape(self):
+        if self.get_nodeattr("direction") == "out":
+            raise ValueError("Folded output shape not defined for output IODMA")
+        else:
+            shape = list(self.get_normal_output_shape())
+            itype_bits = self.get_output_datatype().bitwidth()
+            intfw = self.get_nodeattr("streamWidth")
+            assert (
+                intfw % itype_bits == 0
+            ), "Input stream width must be a multiple of datatype bits"
+            elems_per_word = intfw // itype_bits
+            assert shape[-1] % elems_per_word == 0, "Fold depth must be integer"
+            fold_depth = shape[-1] // elems_per_word
+            shape[-1] = fold_depth
+            shape.append(elems_per_word)
+            return tuple(shape)
+
+    def make_shape_compatible_op(self, model):
+        exp_ishape = self.get_normal_input_shape()
+        oshape = self.get_normal_output_shape()
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[0]))
+        assert ishape == exp_ishape, "Unexpected input shape."
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten().astype(float),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # data type stays the same
+        dtype = model.get_tensor_datatype(node.input[0])
+        exp_idtype = self.get_input_datatype()
+        assert dtype == exp_idtype, "Unexpected datatype."
+        model.set_tensor_datatype(node.output[0], dtype)
+
+    def verify_node(self):
+        pass
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        return DataType[self.get_nodeattr("dataType")]
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output. (Same as input datatype)"""
+        return self.get_input_datatype()
+
+    def get_instream_width(self):
+        if self.get_nodeattr("direction") == "in":
+            return self.get_nodeattr("intfWidth")
+        elif self.get_nodeattr("direction") == "out":
+            return self.get_nodeattr("streamWidth")
+        else:
+            raise ValueError("Invalid IODMA direction, please set to in or out")
+
+    def get_outstream_width(self):
+        if self.get_nodeattr("direction") == "out":
+            return self.get_nodeattr("intfWidth")
+        elif self.get_nodeattr("direction") == "in":
+            return self.get_nodeattr("streamWidth")
+        else:
+            raise ValueError("Invalid IODMA direction, please set to in or out")
+
+    def get_number_output_values(self):
+        oshape = self.get_normal_output_shape()
+        itype_bits = self.get_input_datatype().bitwidth()
+        stream_width = self.get_nodeattr("streamWidth")
+        nelems = np.prod(oshape)
+        nbits = nelems * itype_bits
+        assert (
+            nbits % stream_width == 0
+        ), "DMA: total transfer size must be word multiple"
+        ovalues = nbits // stream_width
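+        # e.g. (assumed values): a [1, 64] INT8 tensor with streamWidth=32
+        # gives nbits = 64 * 8 = 512 and 512 // 32 = 16 output stream words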
+        return ovalues
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "dma.h"']
+        self.code_gen_dict["$GLOBALS$"].append('#include "streamtools.h"')
+
+    def defines(self, var):
+        itype_bits = self.get_input_datatype().bitwidth()
+        total_bits = itype_bits * np.prod(self.get_normal_input_shape())
+        assert total_bits % 8 == 0, "DMA input not a multiple of 1 Byte"
+        total_bytes = total_bits // 8
+        self.code_gen_dict["$DEFINES$"] = [
+            """#define NumBytes1 {}\n#define DataWidth1 {}\n""".format(
+                total_bytes, self.get_nodeattr("intfWidth")
+            )
+        ]
+
+    def get_ap_int_max_w(self):
+        "Return the maximum width of any ap_int used in this module."
+        instream = self.get_instream_width()
+        outstream = self.get_outstream_width()
+        width_lcm = (instream * outstream) // math.gcd(instream, outstream)
+        return width_lcm
+
+    def docompute(self):
+        direction = self.get_nodeattr("direction")
+        mode = self.get_nodeattr("burstMode")
+        if direction == "in":
+            if mode == "wrap":
+                func = "Mem2Stream_Batch_external_wmem"
+            else:
+                func = "Mem2Stream_Batch"
+            dwc_func = "WidthAdjustedOutputStream"
+        elif direction == "out":
+            func = "Stream2Mem_Batch"
+            dwc_func = "WidthAdjustedInputStream"
+        else:
+            raise ValueError("Invalid IODMA direction, please set to in or out")
+        # define templates for instantiation
+        dma_inst_template = func + "<DataWidth1, NumBytes1>(%s, %s, numReps);"
+        dwc_inst_template = dwc_func + "<%d, %d, %d> %s(%s, numReps);"
+        # do stream infrastructure and instantiations
+        intfw = self.get_nodeattr("intfWidth")
+        strmw = self.get_nodeattr("streamWidth")
+        width_lcm = (strmw * intfw) // math.gcd(strmw, intfw)
+        # we always need two intermediate streams: one of width_lcm width and one
+        # of intfw width, since width conversion between intfWidth and streamWidth
+        # goes via their least common multiple (see module-level comments above)
+        dtype_bits = self.get_input_datatype().bitwidth()
+        total_bits = dtype_bits * np.prod(self.get_normal_input_shape())
+        if direction == "in":
+            self.code_gen_dict["$DOCOMPUTE$"] = [
+                dwc_inst_template
+                % (width_lcm, strmw, total_bits // width_lcm, "dwc_lcm", "out"),
+                dwc_inst_template
+                % (intfw, width_lcm, total_bits // intfw, "dwc_intfw", "dwc_lcm"),
+                dma_inst_template % ("in0", "dwc_intfw"),
+            ]
+        else:
+            self.code_gen_dict["$DOCOMPUTE$"] = [
+                dwc_inst_template
+                % (strmw, width_lcm, total_bits // strmw, "dwc_lcm", "in0"),
+                dwc_inst_template
+                % (width_lcm, intfw, total_bits // width_lcm, "dwc_intfw", "dwc_lcm"),
+                dma_inst_template % ("dwc_intfw", "out"),
+            ]
+
+    def blackboxfunction(self):
+        packed_ibits = self.get_instream_width()
+        packed_hls_type_in = "ap_uint<%d>" % packed_ibits
+        packed_obits = self.get_outstream_width()
+        packed_hls_type_out = "ap_uint<%d>" % packed_obits
+        direction = self.get_nodeattr("direction")
+        if direction == "in":
+            self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+                "void %s(%s *in0, hls::stream<%s > &out, unsigned int numReps)"
+                % (self.onnx_node.name, packed_hls_type_in, packed_hls_type_out)
+            ]
+        elif direction == "out":
+            self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+                "void %s(hls::stream<%s > &in0, %s *out, unsigned int numReps)"
+                % (self.onnx_node.name, packed_hls_type_in, packed_hls_type_out)
+            ]
+        else:
+            raise ValueError("Invalid IODMA direction, please set to in or out")
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = [
+            "#pragma HLS INTERFACE s_axilite port=numReps bundle=control"
+        ]
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE s_axilite port=return bundle=control"
+        )
+        direction = self.get_nodeattr("direction")
+        intfname = self.get_nodeattr("intfName")
+        if direction == "in":
+            if intfname == "":
+                self.code_gen_dict["$PRAGMAS$"].append(
+                    "#pragma HLS INTERFACE m_axi offset=slave port=in0"
+                )
+            else:
+                self.code_gen_dict["$PRAGMAS$"].append(
+                    "#pragma HLS INTERFACE m_axi offset=slave port=%s" % (intfname)
+                )
+            self.code_gen_dict["$PRAGMAS$"].append(
+                "#pragma HLS INTERFACE s_axilite port=in0 bundle=control"
+            )
+            self.code_gen_dict["$PRAGMAS$"].append(
+                "#pragma HLS INTERFACE axis port=out"
+            )
+        elif direction == "out":
+            self.code_gen_dict["$PRAGMAS$"].append(
+                "#pragma HLS INTERFACE axis port=in0"
+            )
+            if intfname == "":
+                self.code_gen_dict["$PRAGMAS$"].append(
+                    "#pragma HLS INTERFACE m_axi offset=slave port=out"
+                )
+            else:
+                self.code_gen_dict["$PRAGMAS$"].append(
+                    "#pragma HLS INTERFACE m_axi offset=slave port=%s" % (intfname)
+                )
+            self.code_gen_dict["$PRAGMAS$"].append(
+                "#pragma HLS INTERFACE s_axilite port=out bundle=control"
+            )
+        else:
+            raise ValueError("Invalid IODMA direction, please set to in or out")
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS DATAFLOW")
+
+    def execute_node(self, context, graph):
+        pass
+
+    def dataoutstrm(self):
+        pass
+
+    def read_npy_data(self):
+        pass
+
+    def save_as_npy(self):
+        pass
+
+    def strm_decl(self):
+        pass
+
+    def get_verilog_top_module_intf_names(self):
+        intf_names = super().get_verilog_top_module_intf_names()
+        if self.get_nodeattr("direction") == "out":
+            intf_names["s_axis"] = ["in0_V_V"]
+            intf_names["m_axis"] = []
+        else:
+            intf_names["s_axis"] = []
+            intf_names["m_axis"] = ["out_V_V"]
+        intf_names["axilite"] = ["s_axi_control"]
+        intf_names["aximm"] = ["m_axi_gmem"]
+        return intf_names
diff --git a/src/finn/custom_op/fpgadataflow/labelselect_batch.py b/src/finn/custom_op/fpgadataflow/labelselect_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e206d2058076802a48b69f4c69cccf744489f31
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/labelselect_batch.py
@@ -0,0 +1,355 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+
+import numpy as np
+
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from onnx import TensorProto, helper
+from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy
+from finn.util.basic import roundup_to_integer_multiple
+
+
+class LabelSelect_Batch(HLSCustomOp):
+    """Class that corresponds to finn-hlslib LabelSelect_Batch function."""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+        odt_name = self.get_nodeattr("outputDataType")
+        if odt_name == "":
+            # If not provided compute min size
+            labels = self.get_nodeattr("Labels")
+            odt = DataType.get_smallest_possible(labels - 1)
+            # ensure a datatype divisible by 8-bits in case this is the last node
+            bw = roundup_to_integer_multiple(odt.bitwidth(), 8)
+            new_odt_name = odt.name.replace(str(odt.bitwidth()), str(bw))
+            odt = DataType[new_odt_name]
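+            # e.g. (illustrative): Labels=10 needs UINT4 to hold index 9, which
+            # is then widened to the byte-aligned UINT8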
+            odt_name = odt.name
+            self.set_nodeattr("outputDataType", odt_name)
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            "Labels": ("i", True, 0),
+            "PE": ("i", True, 0),
+            "K": ("i", True, 0),
+            # FINN DataTypes for input
+            "inputDataType": ("s", True, ""),
+            "outputDataType": ("s", False, ""),
+            # number of input vectors, examples:
+            # [1] is a single vector (like a FC layer with batch=1)
+            # [4] is four vectors (like a FC layer with batch=4)
+            # [1, 4, 4] is four * four vectors (like a conv layer with batch=1)
+            "numInputVectors": ("ints", False, [1]),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def get_normal_input_shape(self):
+        nlabels = self.get_nodeattr("Labels")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        ishape = tuple(vecs + [nlabels])
+        return ishape
+
+    def get_folded_input_shape(self):
+        nlabels = self.get_nodeattr("Labels")
+        pe = self.get_nodeattr("PE")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        assert nlabels % pe == 0, "PE must divide Labels"
+        folds = int(nlabels / pe)
+        folded_ishape = tuple(vecs + [folds, pe])
+        return folded_ishape
+
+    def get_normal_output_shape(self):
+        k = self.get_nodeattr("K")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        oshape = tuple(vecs + [k])
+        return oshape
+
+    def get_folded_output_shape(self):
+        k = self.get_nodeattr("K")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        oshape = tuple(vecs + [k, 1])
+        return oshape
+
+    def make_shape_compatible_op(self, model):
+        exp_ishape = self.get_normal_input_shape()
+        oshape = self.get_normal_output_shape()
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[0]))
+        assert ishape == exp_ishape, "Unexpected input shape."
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.int64)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.INT64,
+                dims=values.shape,
+                vals=values.flatten(),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # check input datatype against property
+        idt = model.get_tensor_datatype(node.input[0])
+        self.set_nodeattr("inputDataType", idt.name)
+
+        odt = self.get_output_datatype()
+        model.set_tensor_datatype(self.onnx_node.output[0], odt)
+
+    def verify_node(self):
+        info_messages = []
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+
+        # verify that "backend" is set to "fpgadataflow"
+        backend_value = self.get_nodeattr("backend")
+        if backend_value == "fpgadataflow":
+            info_messages.append("Attribute backend is set correctly")
+        else:
+            info_messages.append('Attribute backend should be set to "fpgadataflow"')
+
+        # verify that all necessary attributes exist
+        try:
+            self.get_nodeattr("code_gen_dir_cppsim")
+            self.get_nodeattr("executable_path")
+            self.get_nodeattr("Labels")
+            self.get_nodeattr("PE")
+            self.get_nodeattr("K")
+            self.get_nodeattr("inputDataType")
+            self.get_nodeattr("outputDataType")
+            info_messages.append("All necessary attributes exist")
+        except Exception:
+            info_messages.append(
+                """The required LabelSelect_Batch attributes do not exist."""
+            )
+
+        # verify that input data is 1D
+        if len(self.get_nodeattr("numInputVectors")) > 1:
+            info_messages.append("""LabelSelect_Batch requires 1D data input.""")
+            raise Exception
+
+        return info_messages
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        ret = DataType[self.get_nodeattr("inputDataType")]
+        return ret
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output."""
+        ret = DataType[self.get_nodeattr("outputDataType")]
+        return ret
+
+    def get_instream_width(self):
+        """Returns input stream width."""
+        ibits = self.get_input_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        in_width = pe * ibits
+        return in_width
+
+    def get_outstream_width(self):
+        """Returns output stream width."""
+        return self.get_output_datatype().bitwidth()
+
+    def get_number_output_values(self):
+        return self.get_nodeattr("K")
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+        exp_ishape = self.get_normal_input_shape()
+        exp_oshape = self.get_normal_output_shape()
+        folded_ishape = self.get_folded_input_shape()
+        folded_oshape = self.get_folded_output_shape()
+
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        inp = context[node.input[0]]
+        assert str(inp.dtype) == "float32", "Input datatype is not float32"
+        assert inp.shape == exp_ishape, """Input shape doesn't match expected shape."""
+        export_idt = self.get_input_datatype()
+        # reshape input into folded form
+        inp = inp.reshape(folded_ishape)
+        # make copy before saving array
+        reshaped_input = inp.copy()
+        np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_input)
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            assert (
+                context[node.output[0]].shape == folded_oshape
+            ), "cppsim \
+            did not produce expected ofolded utput shape"
+            context[node.output[0]] = context[node.output[0]].reshape(*exp_oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            rtlsim_inp = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            rtlsim_output = self.rtlsim(sim, rtlsim_inp)
+            odt = self.get_output_datatype()
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                rtlsim_output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+            # load and reshape output
+            output = np.load(out_npy_path)
+            output = np.asarray([output], dtype=np.float32).reshape(*exp_oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        assert (
+            context[node.output[0]].shape == exp_oshape
+        ), """Output shape doesn't match expected shape."""
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "maxpool.h"']
+
+    def defines(self, var):
+        self.code_gen_dict["$DEFINES$"] = []
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+
+        # Calling npy2apintstream with reverse_inner = false to have LE packing
+        # as required by HLS fxn LabelSelect_Batch
+        # Also notice that StreamingDataWidthConverter_Batch performs LE packing
+
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0,false);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        node = self.onnx_node
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """{}<{}, {}, {}, {}, {} > (in0, out, 1);""".format(
+                node.op_type,
+                self.get_nodeattr("Labels"),
+                self.get_nodeattr("PE"),
+                self.get_nodeattr("K"),
+                self.get_input_datatype().get_hls_datatype_str(),
+                self.get_output_datatype().get_hls_datatype_str(),
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        oshape = self.get_folded_output_shape()
+        oshape_cpp_str = str(oshape).replace("(", "{").replace(")", "}")
+
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s");'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                oshape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            """void {}(hls::stream<ap_uint<{}*{}>> &in0,
+                hls::stream<ap_uint<{}> > &out)""".format(
+                self.onnx_node.name,
+                self.get_nodeattr("PE"),
+                self.get_input_datatype().bitwidth(),
+                self.get_output_datatype().bitwidth(),
+            )
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
diff --git a/src/finn/custom_op/fpgadataflow/pool_batch.py b/src/finn/custom_op/fpgadataflow/pool_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a2fa6889ae0ebb94976d50b0fc8362d01a63bea
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/pool_batch.py
@@ -0,0 +1,427 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import numpy as np
+
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from finn.core.datatype import DataType
+from onnx import TensorProto, helper
+from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy
+
+
+class Pool_Batch(HLSCustomOp):
+    """Class that corresponds to finn-hlslib Pool_batch function.
+    Requires ConvolutionInputGenerator(depthwise == 1) to format its input
+
+    Input shape (BatchSize,OutImgDim,OutImgDim,KernelSize^2*Channels)
+    Output shape (BatchSize,OutImgDim,OutImgDim,Channels)
+
+    Notes:
+    # The input shape was chosen to be compatible with im2col (only true when there
+    is no folding).
+
+    # The actual data layout produced by the hlslib kernels is different
+    for depthwise ops.
+     * depthwise SWG: (1, OFMDim, OFMDim, IFMChannels/PE, K, K, PE)
+
+    Channels can be folded using PE (SIMD from the input perspective)
+    """
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            "Channels": ("i", True, 0),
+            "PE": ("i", True, 1),
+            "KernelSize": ("i", True, 0),
+            # Function:
+            #  - MaxPool
+            #  - AvgPool (not yet supported here, but available in hlslib)
+            #  - AccPool (not yet supported here, but available in hlslib)
+            "Function": ("s", True, ""),
+            "OutImgDim": ("i", True, 0),
+            # FINN DataTypes for inputs/outputs
+            "InputDataType": ("s", True, ""),
+            "OutputDataType": ("s", True, ""),
+            "AccumBits": ("i", False, 0),
+            "Size": ("i", False, 1),
+            "BatchSize": ("i", False, 1),
+        }
+
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        return DataType[self.get_nodeattr("InputDataType")]
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output."""
+        fxn = self.get_nodeattr("Function")
+        odt = DataType[self.get_nodeattr("OutputDataType")]
+
+        if fxn == "MaxPool":
+            # Same as input
+            idt = DataType[self.get_nodeattr("InputDataType")]
+            assert odt == idt, "Input and output datatypes must be equal for MaxPool"
+        elif fxn == "QuantAvgPool":
+            idt = DataType[self.get_nodeattr("InputDataType")]
+            assert (
+                idt.signed() == odt.signed()
+            ), """QuantAvgPool: Can't mix signed
+            and unsigned datatypes"""
+        else:
+            raise Exception("Pool_Batch doesn't currently support " + fxn)
+
+        return odt
+
+    def get_normal_input_shape(self):
+        ifm_ch = self.get_nodeattr("Channels")
+        odim = self.get_nodeattr("OutImgDim")
+        batch_size = self.get_nodeattr("BatchSize")
+        k = self.get_nodeattr("KernelSize")
+        ishape = (batch_size, odim, odim, k * k * ifm_ch)
+        return ishape
+
+    def get_folded_input_shape(self):
+        normal_ishape = list(self.get_normal_input_shape())
+        ifm_ch = self.get_nodeattr("Channels")
+        pe = self.get_nodeattr("PE")
+        assert ifm_ch % pe == 0, "PE must divide input channels"
+        fold = int(normal_ishape[-1] / pe)
+        folded_ishape = normal_ishape[:-1] + [fold, pe]
+        return tuple(folded_ishape)
+
+    def get_normal_output_shape(self):
+        ofm_ch = self.get_nodeattr("Channels")
+        odim = self.get_nodeattr("OutImgDim")
+        batch_size = self.get_nodeattr("BatchSize")
+        oshape = (batch_size, odim, odim, ofm_ch)
+        return oshape
+
+    def get_folded_output_shape(self):
+        normal_oshape = list(self.get_normal_output_shape())
+        ifm_ch = self.get_nodeattr("Channels")
+        pe = self.get_nodeattr("PE")
+        assert ifm_ch % pe == 0, "PE must divide input channels"
+        fold = int(ifm_ch / pe)
+        folded_oshape = normal_oshape[:-1] + [fold, pe]
+        return tuple(folded_oshape)
+
+    def get_number_output_values(self):
+        folded_oshape = self.get_folded_output_shape()
+        return np.prod(folded_oshape[1:-1])
+
+    def get_exp_cycles(self):
+        # (Channels * kernel * kernel) / PE * odim * odim * batch_size
+        ifm_ch = self.get_nodeattr("Channels")
+        pe = self.get_nodeattr("PE")
+        k = self.get_nodeattr("KernelSize")
+        odim = self.get_nodeattr("OutImgDim")
+        batch_size = self.get_nodeattr("BatchSize")
+        exp_cycles = ((ifm_ch * k * k) / pe) * odim * odim * batch_size
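+        # illustrative example (values assumed): Channels=32, PE=4, KernelSize=2,
+        # OutImgDim=14, BatchSize=1 -> (32*2*2)/4 * 14*14 * 1 = 6272 cycles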
+        return int(exp_cycles)
+
+    def get_instream_width(self):
+        dt_bits = self.get_input_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        in_width = int(dt_bits * pe)
+        return in_width
+
+    def get_outstream_width(self):
+        dt_bits = self.get_output_datatype().bitwidth()
+        pe = self.get_nodeattr("PE")
+        out_width = int(dt_bits * pe)
+        return out_width
+
+    def make_shape_compatible_op(self, model):
+        exp_ishape = self.get_normal_input_shape()
+        oshape = self.get_normal_output_shape()
+        ishape = tuple(model.get_tensor_shape(self.onnx_node.input[0]))
+        assert ishape == exp_ishape, "Unexpected input shape for Pool_Batch."
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten().astype(float),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # data type stays the same
+        dtype = self.get_output_datatype()
+        model.set_tensor_datatype(node.output[0], dtype)
+
+    def verify_node(self):
+        info_messages = []
+
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+
+        # verify that "backend" is set to "fpgadataflow"
+        backend_value = self.get_nodeattr("backend")
+        if backend_value == "fpgadataflow":
+            info_messages.append("Attribute backend is set correctly")
+        else:
+            info_messages.append('Attribute backend should be set to "fpgadataflow"')
+
+        # verify the number of inputs
+        if len(self.onnx_node.input) == 1:
+            info_messages.append("The number of inputs is correct")
+        else:
+            info_messages.append("""Pool_Batch needs 1 data input""")
+
+        # check supported function
+        fxn = self.get_nodeattr("Function")
+        if fxn in ["MaxPool", "QuantAvgPool"]:
+            info_messages.append(
+                "Attribute Function contains a supported pool function"
+            )
+        else:
+            info_messages.append(
+                "Attribute Function contains an unsupported pool function"
+            )
+        return info_messages
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "maxpool.h"']
+        self.code_gen_dict["$GLOBALS$"] += ['#include "pool.hpp"']
+
+    def defines(self, var):
+        self.code_gen_dict["$DEFINES$"] = []
+
+        ifm_ch = self.get_nodeattr("Channels")
+        self.code_gen_dict["$DEFINES$"] += ["#define Channels {}".format(ifm_ch)]
+
+        pe = self.get_nodeattr("PE")
+        self.code_gen_dict["$DEFINES$"] += ["#define PE {}".format(pe)]
+
+        k = self.get_nodeattr("KernelSize")
+        self.code_gen_dict["$DEFINES$"] += ["#define KernelSize {}".format(k)]
+
+        odim = self.get_nodeattr("OutImgDim")
+        self.code_gen_dict["$DEFINES$"] += ["#define OFMDim {}".format(odim)]
+
+        numReps = self.get_nodeattr("BatchSize")
+        self.code_gen_dict["$DEFINES$"] += ["#define numReps {}".format(numReps)]
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        if dtype == DataType.BIPOLAR:
+            # use binary for bipolar storage
+            dtype = DataType.BINARY
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0,false);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        idt = self.get_input_datatype()
+        i_hls_dt = idt.get_hls_datatype_str()
+        odt = self.get_output_datatype()
+        o_hls_dt = odt.get_hls_datatype_str()
+        size = self.get_nodeattr("Size")
+        accum_bits = self.get_nodeattr("AccumBits")
+        self.code_gen_dict["$DOCOMPUTE$"] = []
+
+        fxn = self.get_nodeattr("Function")
+        if fxn == "MaxPool":
+            self.code_gen_dict["$DOCOMPUTE$"] += [
+                "MaxPoolFunction<{},KernelSize> pool_fxn;".format(i_hls_dt)
+            ]
+        elif fxn == "QuantAvgPool":
+            if idt.signed():
+                act_hls_dt = "ap_int<{}>".format(accum_bits)
+            else:
+                act_hls_dt = "ap_uint<{}>".format(accum_bits)
+            self.code_gen_dict["$DOCOMPUTE$"] += [
+                "QuantAvgPoolFunction<{},{},{}> pool_fxn;".format(
+                    act_hls_dt, o_hls_dt, size
+                )
+            ]
+        else:
+            raise Exception("Pool_Batch doesn't currently support " + fxn)
+
+        self.code_gen_dict["$DOCOMPUTE$"] += [
+            """Pool_batch<Channels, PE, KernelSize,Slice<{} >, Slice< {} > >
+        (in0,out, pool_fxn, OFMDim*OFMDim*numReps);""".format(
+                i_hls_dt, o_hls_dt
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        if dtype == DataType.BIPOLAR:
+            # use binary for bipolar storage
+            dtype = DataType.BINARY
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        oshape = self.get_folded_output_shape()
+        oshape_cpp_str = str(oshape).replace("(", "{").replace(")", "}")
+
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s",false);'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                oshape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        packed_ibits = self.get_instream_width()
+        packed_in_hls_type = "ap_uint<%d>" % packed_ibits
+
+        packed_obits = self.get_outstream_width()
+        packed_out_hls_type = "ap_uint<%d>" % packed_obits
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            "void %s(hls::stream<%s > &in0, hls::stream<%s > &out)"
+            % (self.onnx_node.name, packed_in_hls_type, packed_out_hls_type)
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+        exp_ishape = self.get_normal_input_shape()
+        folded_ishape = self.get_folded_input_shape()
+        exp_oshape = self.get_normal_output_shape()
+        folded_oshape = self.get_folded_output_shape()
+
+        # TODO ensure codegen dir exists
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        inp = context[node.input[0]]
+
+        assert str(inp.dtype) == "float32", "Input datatype is not float32"
+        assert (
+            inp.shape == exp_ishape
+        ), """Input shape doesn't
+        match expected shape (batch_size,odim,odim,k*k*ifm_ch)."""
+
+        export_idt = self.get_input_datatype()
+        reshaped_input = inp.reshape(folded_ishape)
+
+        np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_input)
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            assert (
+                context[node.output[0]].shape == folded_oshape
+            ), "cppsim did not produce expected folded output shape"
+            context[node.output[0]] = context[node.output[0]].reshape(*exp_oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            rtlsim_inp = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            rtlsim_output = self.rtlsim(sim, rtlsim_inp)
+            odt = self.get_output_datatype()
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                rtlsim_output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+            # load and reshape output
+            output = np.load(out_npy_path)
+            output = np.asarray([output], dtype=np.float32).reshape(*exp_oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        assert (
+            context[node.output[0]].shape == exp_oshape
+        ), """Output
+        shape doesn't match expected shape (batch_size, odim, odim, channels)."""
diff --git a/src/finn/custom_op/fpgadataflow/streamingdatawidthconverter_batch.py b/src/finn/custom_op/fpgadataflow/streamingdatawidthconverter_batch.py
index f666becdbcceca6ca202907610595f8c0069c5a0..748880400d53f9bb6e90585234e2cfc21a366ba6 100644
--- a/src/finn/custom_op/fpgadataflow/streamingdatawidthconverter_batch.py
+++ b/src/finn/custom_op/fpgadataflow/streamingdatawidthconverter_batch.py
@@ -28,6 +28,7 @@
 
 import os
 import numpy as np
+import math
 
 from finn.custom_op.fpgadataflow import HLSCustomOp
 from finn.core.datatype import DataType
@@ -51,6 +52,10 @@ class StreamingDataWidthConverter_Batch(HLSCustomOp):
             "outWidth": ("i", True, 0),
             # FINN DataTypes for inputs/outputs
             "dataType": ("s", True, ""),
+            # Toggle between hls or IPI implementation
+            # hls - use the hls generated IP during stitching
+            # vivado - use the AXI Infrastructure DWC
+            "impl_style": ("s", False, "hls"),
         }
         my_attrs.update(super().get_nodeattr_types())
         return my_attrs
@@ -381,3 +386,96 @@ class StreamingDataWidthConverter_Batch(HLSCustomOp):
             exp_shape
         ), """Output
         shape doesn't match expected shape, should be same as input shape"""
+
+    def code_generation_ipi(self):
+        impl_style = self.get_nodeattr("impl_style")
+        if impl_style == "hls":
+            return super().code_generation_ipi()
+        elif impl_style == "vivado":
+            cmd = []
+            node_name = self.onnx_node.name
+            # create a hierarchy for this layer, with the same port names
+            clk_name = self.get_verilog_top_module_intf_names()["clk"][0]
+            rst_name = self.get_verilog_top_module_intf_names()["rst"][0]
+            dout_name = self.get_verilog_top_module_intf_names()["m_axis"][0]
+            din_name = self.get_verilog_top_module_intf_names()["s_axis"][0]
+            cmd.append("create_bd_cell -type hier %s" % node_name)
+            cmd.append("create_bd_pin -dir I -type clk /%s/%s" % (node_name, clk_name))
+            cmd.append("create_bd_pin -dir I -type rst /%s/%s" % (node_name, rst_name))
+            cmd.append(
+                "create_bd_intf_pin -mode Master "
+                "-vlnv xilinx.com:interface:axis_rtl:1.0 /%s/%s"
+                % (node_name, dout_name)
+            )
+            cmd.append(
+                "create_bd_intf_pin -mode Slave "
+                "-vlnv xilinx.com:interface:axis_rtl:1.0 /%s/%s" % (node_name, din_name)
+            )
+            # instantiate and configure DWC
+            cmd.append(
+                "create_bd_cell -type ip "
+                "-vlnv xilinx.com:ip:axis_dwidth_converter:1.1 /%s/dwc" % node_name
+            )
+            cmd.append(
+                "set_property -dict "
+                "[list CONFIG.S_TDATA_NUM_BYTES.VALUE_SRC PROPAGATED] "
+                "[get_bd_cells /%s/dwc]" % node_name
+            )
+            cmd.append(
+                "set_property -dict "
+                "[list CONFIG.M_TDATA_NUM_BYTES {%d}] [get_bd_cells /%s/dwc]"
+                % (np.ceil(self.get_outstream_width() / 8), node_name)
+            )
+            cmd.append(
+                "connect_bd_intf_net [get_bd_intf_pins %s/dwc/M_AXIS] "
+                "[get_bd_intf_pins %s/%s]" % (node_name, node_name, dout_name)
+            )
+            cmd.append(
+                "connect_bd_intf_net [get_bd_intf_pins %s/dwc/S_AXIS] "
+                "[get_bd_intf_pins %s/%s]" % (node_name, node_name, din_name)
+            )
+            cmd.append(
+                "connect_bd_net [get_bd_pins %s/%s] [get_bd_pins %s/dwc/aresetn]"
+                % (node_name, rst_name, node_name)
+            )
+            cmd.append(
+                "connect_bd_net [get_bd_pins %s/%s] [get_bd_pins %s/dwc/aclk]"
+                % (node_name, clk_name, node_name)
+            )
+            return cmd
+        else:
+            raise Exception(
+                "DWC implementation style %s not supported, please use hls or vivado"
+                % impl_style
+            )
+
+    def lut_estimation(self):
+        """Calculates resource estimations for LUTs"""
+        impl = self.get_nodeattr("impl_style")
+        inw = self.get_instream_width()
+        outw = self.get_outstream_width()
+
+        minw = min(inw, outw)
+        maxw = max(inw, outw)
+
+        # sometimes widths aren't directly divisible
+        # this requires going up from input width to least common multiple
+        # then down to output width
+        intw = abs(maxw * minw) // math.gcd(maxw, minw)
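+        # illustrative example (values assumed):
+        # inw=48, outw=64 -> intw = lcm(48, 64) = 192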
+
+        # we assume a shift-based implementation
+        # even if we don't use LUTs explicitly, we make some unavailable
+        # to other logic because they're tied into the DWC control sets
+
+        cnt_luts = 0
+        cset_luts = 0
+
+        if inw != intw:
+            cnt_luts += abs(math.ceil(math.log(inw/intw, 2)))
+            cset_luts += intw
+        if intw != outw:
+            cnt_luts += abs(math.ceil(math.log(intw / outw, 2)))
+            cset_luts += outw
+
+        return int(cnt_luts + cset_luts)
+
diff --git a/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py b/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py
index f650442401b49f1ad0a602b6b2ad3e50fbb5e5c2..37c6ad4894a1a82878f68c92501844d7fd45d353 100644
--- a/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py
+++ b/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py
@@ -28,9 +28,6 @@
 
 import math
 import os
-import subprocess
-from shutil import copy
-
 import numpy as np
 
 from onnx import TensorProto, helper
@@ -39,6 +36,7 @@ from finn.custom_op.fpgadataflow import HLSCustomOp
 from finn.util.basic import (
     interleave_matrix_outer_dim_from_partitions,
     roundup_to_integer_multiple,
+    calculate_matvec_accumulator_range,
 )
 from finn.util.data_packing import (
     npy_to_rtlsim_input,
@@ -75,6 +73,8 @@ class StreamingFCLayer_Batch(HLSCustomOp):
             "inputDataType": ("s", True, ""),
             "weightDataType": ("s", True, ""),
             "outputDataType": ("s", True, ""),
+            # FINN DataType for accumulator -- auto-computed and updated
+            "accDataType": ("s", False, "INT32"),
             # use xnor-popcount for binary weights/inputs, thus treating them
             # as bipolar
             "binaryXnorMode": ("i", False, 0),
@@ -87,7 +87,8 @@ class StreamingFCLayer_Batch(HLSCustomOp):
             "numInputVectors": ("ints", False, [1]),
             # memory mode for the FC weights
             # const -- embedded weights, default, long compile/synth times
-            # decoupled -- streaming weights
+            # decoupled -- streaming weights with weight streamer packaged inside IP
+            # external -- streaming weights with external streamer
             "mem_mode": ("s", False, "const"),
             # FPGA resource type for memories in decoupled mode
             # auto -- let Vivado decide
@@ -99,23 +100,6 @@ class StreamingFCLayer_Batch(HLSCustomOp):
         my_attrs.update(super().get_nodeattr_types())
         return my_attrs
 
-    def get_verilog_top_module_name(self):
-        "Return the Verilog top module name for this node."
-
-        node = self.onnx_node
-        # set top name depending on mem_mode
-        mem_mode = self.get_nodeattr("mem_mode")
-        if mem_mode == "const":
-            prefixed_top_name = "%s_%s" % (node.name, node.name)
-        elif mem_mode == "decoupled":
-            prefixed_top_name = "%s_memstream" % (node.name)
-        else:
-            raise Exception(
-                """Please set mem_mode to "const" or "decoupled", currently no other
-                parameter value is supported!"""
-            )
-        return prefixed_top_name
-
     def calc_wmem(self):
         """Calculates and returns WMEM."""
         mw = self.get_nodeattr("MW")
@@ -240,11 +224,21 @@ class StreamingFCLayer_Batch(HLSCustomOp):
         Q = self.get_nodeattr("SIMD")
         wdt = self.get_weight_datatype()
         W = wdt.bitwidth()
-        D_in = self.get_instream_width()
-        D_out = self.get_outstream_width()
+        D_in = self.get_nodeattr("MW")
+        D_out = self.get_nodeattr("MH")
         omega = (D_in * D_out) / (Q * P)
         return P * (math.ceil(omega / 512)) * (math.ceil((Q * W) / 36))
 
+    def bram_efficiency_estimation(self):
+        wdt = self.get_weight_datatype()
+        W = wdt.bitwidth()
+        D_in = self.get_nodeattr("MW")
+        D_out = self.get_nodeattr("MH")
+        bram16_est = self.bram_estimation()
+        wbits = W * D_in * D_out
+        bram16_est_capacity = bram16_est * 36 * 512
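+        # illustrative example (values assumed): 1-bit weights, MW=MH=512,
+        # bram_estimation() = 16 -> wbits = 262144, capacity = 294912, eff ~ 0.89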
+        return wbits / bram16_est_capacity
+
     def lut_estimation(self):
         """Calculates resource estimations for LUTs based on:
         - FINN-R: An End-to-End Deep-Learning Framework for Fast
@@ -267,6 +261,17 @@ class StreamingFCLayer_Batch(HLSCustomOp):
 
         return c0 + c1 * (P * Q) * (W * A)
 
+    def get_exp_cycles(self):
+        pe = self.get_nodeattr("PE")
+        simd = self.get_nodeattr("SIMD")
+        num_inp_vec = self.get_nodeattr("numInputVectors")
+        mh = self.get_nodeattr("MH")
+        mw = self.get_nodeattr("MW")
+        # since mmv != 1 is not supported yet, we set mmv for now to 1
+        mmv = 1
+        exp_cycles = (mh / pe) * (mw / simd) * np.prod(num_inp_vec) / mmv
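+        # illustrative example (values assumed): MH=128, PE=16, MW=256, SIMD=8,
+        # numInputVectors=[1] -> (128/16) * (256/8) * 1 = 256 cycles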
+        return int(exp_cycles)
+
     def get_input_datatype(self):
         """Returns FINN DataType of input."""
         return DataType[self.get_nodeattr("inputDataType")]
@@ -290,12 +295,18 @@ class StreamingFCLayer_Batch(HLSCustomOp):
         return out_width
 
     def get_weightstream_width(self):
-        """Returns weight stream width. Used in decoupled mode."""
-        pe = self.get_nodeattr("PE")
-        simd = self.get_nodeattr("SIMD")
-        wp = self.get_weight_datatype().bitwidth()
-        w_width = pe * simd * wp
-        return w_width
+        """Returns weight stream width. Used only in decoupled or external mode."""
+        if (
+            self.get_nodeattr("mem_mode") == "decoupled"
+            or self.get_nodeattr("mem_mode") == "external"
+        ):
+            pe = self.get_nodeattr("PE")
+            simd = self.get_nodeattr("SIMD")
+            wp = self.get_weight_datatype().bitwidth()
+            w_width = pe * simd * wp
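+            # illustrative example (values assumed):
+            # PE=16, SIMD=8, 4-bit weights -> stream width 16*8*4 = 512 bits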
+            return w_width
+        else:
+            return 0
 
     def get_weightstream_width_padded(self):
         """Returns weight stream width padded to a multiple of 8. This is required
@@ -416,6 +427,51 @@ class StreamingFCLayer_Batch(HLSCustomOp):
         ret = np.flip(ret, axis=-1)
         return ret
 
+    def minimize_accumulator_width(self, model):
+        weights = model.get_initializer(self.onnx_node.input[1])
+        if len(self.onnx_node.input) > 2:
+            thresholds = model.get_initializer(self.onnx_node.input[2])
+        else:
+            thresholds = None
+        idt = self.get_input_datatype()
+        # calculate minimum and maximum values of accumulator
+        (acc_min, acc_max) = calculate_matvec_accumulator_range(weights, idt)
+        if thresholds is not None:
+            threshold_tensor = self.get_hls_compatible_threshold_tensor(thresholds)
+            # set threshold datatype (and accumulator datatype implicitly)
+            min_threshold = thresholds.min()
+            max_threshold = thresholds.max()
+            # get range required by threshold values
+            tdt_min = min(acc_min, min_threshold)
+            tdt_max = max(acc_max, max_threshold)
+            if tdt_min < 0:
+                if abs(tdt_min) > tdt_max:
+                    tdt = DataType.get_smallest_possible(tdt_min)
+                else:
+                    tdt = DataType.get_smallest_possible(0 - tdt_max)
+            else:
+                tdt = DataType.get_smallest_possible(tdt_max)
+            assert np.vectorize(tdt.allowed)(
+                threshold_tensor
+            ).all(), "Thresholds can't be expressed with type %s" % str(tdt)
+            self.set_nodeattr("accDataType", tdt.name)
+        else:
+            if acc_min < 0:
+                if abs(acc_min) > acc_max:
+                    adt = DataType.get_smallest_possible(acc_min)
+                else:
+                    adt = DataType.get_smallest_possible(0 - acc_max)
+            else:
+                adt = DataType.get_smallest_possible(acc_max)
+            # ensure the bitwidth is divisible by 8 in case this is the last node
+            bw = roundup_to_integer_multiple(adt.bitwidth(), 8)
+            new_adt_name = adt.name.replace(str(adt.bitwidth()), str(bw))
+            adt = DataType[new_adt_name]
+            self.set_nodeattr("accDataType", adt.name)
+            # for no-activation nodes, output dt = acc dt
+            self.set_nodeattr("outputDataType", adt.name)
+        return DataType[self.get_nodeattr("accDataType")]
+
     def get_hls_compatible_threshold_tensor(self, orig_thres_matrix):
         """Convert the original numpy weight matrix orig_weight_matrix into
         a form suitable for passing to the hlslib call:
@@ -471,7 +527,8 @@ class StreamingFCLayer_Batch(HLSCustomOp):
 
     def generate_params(self, model, path):
         mem_mode = self.get_nodeattr("mem_mode")
-        # weights
+        code_gen_dir = path
+        # weights, if not external
         weights = model.get_initializer(self.onnx_node.input[1])
         # convert weights into hlslib-compatible format
         weight_tensor = self.get_hls_compatible_weight_tensor(weights)
@@ -480,7 +537,6 @@ class StreamingFCLayer_Batch(HLSCustomOp):
         # so use it as such for weight generation
         if self.get_weight_datatype() == DataType.BIPOLAR:
             export_wdt = DataType.BINARY
-        code_gen_dir = path
 
         if mem_mode == "const":
             """Saves weights into params.h"""
@@ -510,62 +566,54 @@ class StreamingFCLayer_Batch(HLSCustomOp):
             f_weights.write(weight_hls_code)
             f_weights.close()
 
-        elif mem_mode == "decoupled":
+        elif mem_mode == "decoupled" or mem_mode == "external":
             """Saves weights in corresponding file format for cppsim or rtlsim"""
             # transpose weight tensor from (1, PE, WMEM, SIMD) to (1, WMEM, PE, SIMD)
-            # and save as unflipped weight tensor to be able to differentiate between
-            # flipped an unflipped weight tensor (has to be flipped for cppsim)
-
             weight_tensor_unflipped = np.transpose(weight_tensor, (0, 2, 1, 3))
 
-            # flip PE dimension and reverse SIMD flip for saving weights in .npy
-            weight_tensor_flipped = np.flip(weight_tensor_unflipped, axis=-2)
-            weight_tensor_flipped = np.flip(weight_tensor_flipped, axis=-1)
+            # reverse SIMD flip for saving weights in .npy
+            weight_tensor_simd_flipped = np.flip(weight_tensor_unflipped, axis=-1)
+            # PE flip for saving weights in .dat
+            weight_tensor_pe_flipped = np.flip(weight_tensor_unflipped, axis=-2)
 
-            # reshape weight tensor (flipped and unflipped) to desired shape
+            # reshape weight tensor (simd_flipped and pe_flipped) to desired shape
             pe = self.get_nodeattr("PE")
             simd = self.get_nodeattr("SIMD")
-            # unflipped
-            weight_tensor_unflipped = weight_tensor_unflipped.reshape(1, -1, pe * simd)
-            weight_tensor_unflipped = weight_tensor_unflipped.copy()
+            # simd_flipped
+            weight_tensor_simd_flipped = weight_tensor_simd_flipped.reshape(
+                1, -1, pe * simd
+            )
+            weight_tensor_simd_flipped = weight_tensor_simd_flipped.copy()
             # flipped
-            weight_tensor_flipped = weight_tensor_flipped.reshape(1, -1, pe * simd)
-            weight_tensor_flipped = weight_tensor_flipped.copy()
+            weight_tensor_pe_flipped = weight_tensor_pe_flipped.reshape(
+                1, -1, pe * simd
+            )
+            weight_tensor_pe_flipped = weight_tensor_pe_flipped.copy()
 
             """Saves weights into .npy file"""
-            np.save(os.path.join(code_gen_dir, "weights.npy"), weight_tensor_flipped)
-
-            """Saves weights into .dat file"""
-            # convert weight values into hexstring
-            weight_width = self.get_weightstream_width()
-            # pad to nearest 4 bits to get hex strings
-            weight_width_padded = roundup_to_integer_multiple(weight_width, 4)
-            weight_tensor_unflipped = pack_innermost_dim_as_hex_string(
-                weight_tensor_unflipped, export_wdt, weight_width_padded, prefix=""
-            )
-            weight_stream_len = np.prod(weight_tensor_unflipped.shape)
-            factor = math.ceil(weight_stream_len / 1024)
-            # add zeroes to pad out file to 1024 entries
-            weight_stream = weight_tensor_unflipped.flatten()
-            pad_amt = (factor * 1024) - weight_stream_len
-            weight_stream = np.pad(
-                weight_stream, (0, pad_amt), mode="constant", constant_values="0"
+            np.save(
+                os.path.join(code_gen_dir, "weights.npy"), weight_tensor_simd_flipped
             )
-            weight_stream = weight_stream.copy()
-            i = 0
-            j = 0
-            for val in weight_stream:
-                if i == 1024:
-                    i = 0
-                    j += 1
-                with open("{}/memblock_{}.dat".format(code_gen_dir, j), "a+") as f:
-                    f.write(val + "\n")
-                i += 1
 
+            if mem_mode == "decoupled":
+                """Saves weights into .dat file"""
+                # convert weight values into hexstring
+                weight_width = self.get_weightstream_width()
+                # pad to nearest 4 bits to get hex strings
+                weight_width_padded = roundup_to_integer_multiple(weight_width, 4)
+                weight_tensor_pe_flipped = pack_innermost_dim_as_hex_string(
+                    weight_tensor_pe_flipped, export_wdt, weight_width_padded, prefix=""
+                )
+                # add zeroes to pad out file to 1024 entries
+                weight_stream = weight_tensor_pe_flipped.flatten()
+                weight_stream = weight_stream.copy()
+                with open("{}/memblock_0.dat".format(code_gen_dir), "a+") as f:
+                    for val in weight_stream:
+                        f.write(val + "\n")
         else:
             raise Exception(
-                """Please set mem_mode to "const"i or "decoupled", currently no other
-                    parameter value is supported!"""
+                """Please set mem_mode to "const", "decoupled", or "external",
+                currently no other parameter value is supported!"""
             )
 
         # save thresholds in thresh.h
@@ -573,7 +621,6 @@ class StreamingFCLayer_Batch(HLSCustomOp):
             thresholds = model.get_initializer(self.onnx_node.input[2])
             if thresholds is not None:
                 threshold_tensor = self.get_hls_compatible_threshold_tensor(thresholds)
-                tdt = DataType.INT32
                 # use UINT32 threshold export for bipolar times bipolar
                 inp_is_bipolar = self.get_input_datatype() == DataType.BIPOLAR
                 wt_is_bipolar = self.get_weight_datatype() == DataType.BIPOLAR
@@ -583,8 +630,12 @@ class StreamingFCLayer_Batch(HLSCustomOp):
                 bin_xnor_mode = self.get_nodeattr("binaryXnorMode") == 1
                 inp_is_bipolar = inp_is_bipolar or (inp_is_binary and bin_xnor_mode)
                 wt_is_bipolar = wt_is_bipolar or (wt_is_binary and bin_xnor_mode)
-                if inp_is_bipolar and wt_is_bipolar:
-                    tdt = DataType.UINT32
+                # get computed threshold datatype from attribute
+                tdt = DataType[self.get_nodeattr("accDataType")]
+
+                assert np.vectorize(tdt.allowed)(
+                    threshold_tensor
+                ).all(), "Thresholds can't be expressed with type %s" % str(tdt)
                 thresholds_hls_code = numpy_to_hls_code(
                     threshold_tensor, tdt, "thresholds", False, True
                 )
@@ -613,6 +664,7 @@ class StreamingFCLayer_Batch(HLSCustomOp):
 
     def execute_node(self, context, graph):
         mode = self.get_nodeattr("exec_mode")
+        mem_mode = self.get_nodeattr("mem_mode")
         node = self.onnx_node
 
         # TODO ensure codegen dir exists
@@ -681,7 +733,25 @@ class StreamingFCLayer_Batch(HLSCustomOp):
             )
             super().reset_rtlsim(sim)
             super().toggle_clk(sim)
-            output = self.rtlsim(sim, inp)
+            if mem_mode == "external" or mem_mode == "decoupled":
+                wnbits = self.get_weightstream_width()
+                export_wdt = self.get_weight_datatype()
+                # we have converted bipolar weights to binary for export,
+                # so use it as such for weight generation
+                if self.get_weight_datatype() == DataType.BIPOLAR:
+                    export_wdt = DataType.BINARY
+                wei = npy_to_rtlsim_input(
+                    "{}/weights.npy".format(code_gen_dir), export_wdt, wnbits
+                )
+                num_w_reps = np.prod(self.get_nodeattr("numInputVectors"))
+                io_dict = {
+                    "inputs": {"in0": inp, "weights": wei * num_w_reps},
+                    "outputs": {"out": []},
+                }
+                self.rtlsim_multi_io(sim, io_dict)
+                output = io_dict["outputs"]["out"]
+            else:
+                output = self.rtlsim(sim, inp)
             odt = self.get_output_datatype()
             target_bits = odt.bitwidth()
             packed_bits = self.get_outstream_width()
@@ -712,12 +782,12 @@ class StreamingFCLayer_Batch(HLSCustomOp):
         if mem_mode == "const":
             # self.code_gen_dict["$GLOBALS$"] += ['#include "params.h"']
             pass
-        elif mem_mode == "decoupled":
+        elif mem_mode == "decoupled" or mem_mode == "external":
             self.code_gen_dict["$GLOBALS$"] += ['#include "mvau.hpp"']
         else:
             raise Exception(
-                """Please set mem_mode to "const" or "decoupled", currently no other
-                    parameter value is supported!"""
+                """Please set mem_mode to "const", "decoupled", or "external",
+                currently no other parameter value is supported!"""
             )
         if self.calc_tmem() != 0:
             # TODO find a better way of checking for no pregenerated thresholds
@@ -740,7 +810,7 @@ class StreamingFCLayer_Batch(HLSCustomOp):
                 numReps,
             )
         ]
-        if mem_mode == "decoupled":
+        if mem_mode == "decoupled" or mem_mode == "external":
             wdt = self.get_weight_datatype()
             self.code_gen_dict["$DEFINES$"].append(
                 "#define WP1 {}\n".format(wdt.bitwidth())
@@ -766,7 +836,7 @@ class StreamingFCLayer_Batch(HLSCustomOp):
         )
 
         mem_mode = self.get_nodeattr("mem_mode")
-        if mem_mode == "decoupled":
+        if mem_mode == "decoupled" or mem_mode == "external":
             wdt = self.get_weight_datatype()
             elem_bits = wdt.bitwidth()
             packed_bits = self.get_weightstream_width()
@@ -790,7 +860,7 @@ class StreamingFCLayer_Batch(HLSCustomOp):
             'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
         )
 
-        if mem_mode == "decoupled":
+        if mem_mode == "decoupled" or mem_mode == "external":
             self.code_gen_dict["$STREAMDECLARATIONS$"].append(
                 'hls::stream<ap_uint<{}>> weights ("weights");'.format(
                     self.get_weightstream_width()
@@ -818,7 +888,7 @@ class StreamingFCLayer_Batch(HLSCustomOp):
                     self.get_nodeattr("resType"),
                 )
             ]
-        elif mem_mode == "decoupled":
+        elif mem_mode == "decoupled" or mem_mode == "external":
             wdt = self.get_weight_datatype()
             if wdt == DataType.BIPOLAR:
                 export_wdt = DataType.BINARY
@@ -839,8 +909,8 @@ class StreamingFCLayer_Batch(HLSCustomOp):
 
         else:
             raise Exception(
-                """Please set mem_mode to "const" or "decoupled", currently no other
-                    parameter value is supported!"""
+                """Please set mem_mode to "const", "decoupled", or "external",
+                currently no other parameter value is supported!"""
             )
 
     def dataoutstrm(self):
@@ -886,7 +956,7 @@ class StreamingFCLayer_Batch(HLSCustomOp):
                     self.get_outstream_width(),
                 )
             ]
-        elif mem_mode == "decoupled":
+        elif mem_mode == "decoupled" or mem_mode == "external":
             self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
                 """void {}(
                     hls::stream<ap_uint<{}>> &in0,
@@ -935,7 +1005,7 @@ class StreamingFCLayer_Batch(HLSCustomOp):
                     "complete dim=1"
                 )
             )
-        elif mem_mode == "decoupled":
+        elif mem_mode == "decoupled" or mem_mode == "external":
             self.code_gen_dict["$PRAGMAS$"].append(
                 "#pragma HLS INTERFACE axis port=weights"
             )
@@ -945,8 +1015,8 @@ class StreamingFCLayer_Batch(HLSCustomOp):
 
         else:
             raise Exception(
-                """Please set mem_mode to "const", currently no other
-                    parameter value is supported!"""
+                """Please set mem_mode to "const", "decoupled", or "external",
+                currently no other parameter value is supported!"""
             )
 
         # the threshold tensor is acc_type [PE][TMEM][N_THRES]
@@ -967,111 +1037,105 @@ class StreamingFCLayer_Batch(HLSCustomOp):
                 )
             )
 
-    def code_generation_ipgen(self, model, fpgapart, clk):
-        # generate code for all mem_mode of MVAU/FCLayer unit
-        super().code_generation_ipgen(model, fpgapart, clk)
-
-        # if mem_mode = "decoupled" generate code for verilog wrapper
+    def code_generation_ipi(self):
+        cmd = []
+        # add streamer if needed
         mem_mode = self.get_nodeattr("mem_mode")
         if mem_mode == "decoupled":
-            # empty code gen dictionary for new entries
-            self.code_gen_dict.clear()
-            self.code_gen_dict["$TOPNAME$"] = [
-                "{}_memstream".format(self.onnx_node.name)
-            ]
-            self.code_gen_dict["$LAYER_NAME$"] = [
-                "{}_{}".format(self.onnx_node.name, self.onnx_node.name)
-            ]
-            # make instream width a multiple of 8 for AXI stream interface
-            in_width = self.get_instream_width_padded()
-            self.code_gen_dict["$IN_RANGE$"] = ["[{}:0]".format(in_width - 1)]
-            self.code_gen_dict["$OUT_RANGE$"] = [
-                "[{}:0]".format(self.get_outstream_width_padded() - 1)
-            ]
-            # make weight stream width a multiple of 8 for AXI stream interface
-            weight_width = self.get_weightstream_width_padded()
-            self.code_gen_dict["$WEIGHT_RANGE$"] = ["[{}:0]".format(weight_width - 1)]
-            self.code_gen_dict["$WEIGHT_WIDTH$"] = [str(weight_width)]
-            self.code_gen_dict["$WSTREAM_DEPTH$"] = [str(self.calc_wmem())]
-            self.code_gen_dict["$MEM_DEPTH$"] = [
-                str(roundup_to_integer_multiple(self.calc_wmem(), 1024))
-            ]
-            self.code_gen_dict["$RAM_STYLE$"] = [self.get_nodeattr("ram_style")]
-
-            template = self.decoupled_wrapper
-
-            for key in self.code_gen_dict:
-                # transform list into long string separated by '\n'
-                code_gen_line = "\n".join(self.code_gen_dict[key])
-                template = template.replace(key, code_gen_line)
-            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
-            f = open(
-                os.path.join(
-                    code_gen_dir, "{}_memstream.v".format(self.onnx_node.name)
-                ),
-                "w",
+            node_name = self.onnx_node.name
+            # create a hierarchy for this layer, with the same port names
+            clk_name = self.get_verilog_top_module_intf_names()["clk"][0]
+            rst_name = self.get_verilog_top_module_intf_names()["rst"][0]
+            dout_name = self.get_verilog_top_module_intf_names()["m_axis"][0]
+            din_name = self.get_verilog_top_module_intf_names()["s_axis"][0]
+            cmd.append("create_bd_cell -type hier %s" % node_name)
+            cmd.append("create_bd_pin -dir I -type clk /%s/%s" % (node_name, clk_name))
+            cmd.append("create_bd_pin -dir I -type rst /%s/%s" % (node_name, rst_name))
+            cmd.append(
+                "create_bd_intf_pin -mode Master "
+                "-vlnv xilinx.com:interface:axis_rtl:1.0 /%s/%s"
+                % (node_name, dout_name)
             )
-            f.write(template)
-            f.close()
-            self.code_gen_dict.clear()
-
-    def ipgen_singlenode_code(self):
-        # generate ip block of MVAU/FCLayer unit for all mem modes
-        super().ipgen_singlenode_code()
-
-        mem_mode = self.get_nodeattr("mem_mode")
-        if mem_mode == "decoupled":
-            # copy necessary verilog and .dat files
-            # into verilog folder in code generation folder
-            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
-            verilog_folder = "{}/project_{}/sol1/impl/verilog/".format(
-                code_gen_dir, self.onnx_node.name
+            cmd.append(
+                "create_bd_intf_pin -mode Slave "
+                "-vlnv xilinx.com:interface:axis_rtl:1.0 /%s/%s" % (node_name, din_name)
             )
-            # copy memstream components from finn-rtllib
-            memstream_dir = "/workspace/finn/finn-rtllib/memstream/hdl/"
-            for file in os.listdir(memstream_dir):
-                if file.endswith(".v"):
-                    verilog_file = os.path.join(memstream_dir, file)
-                    copy(verilog_file, verilog_folder)
-            # copy .dat files of weights
-            for file in os.listdir(code_gen_dir):
-                if file.endswith(".dat"):
-                    dat_file = os.path.join(code_gen_dir, file)
-                    copy(dat_file, verilog_folder)
-            # copy verilog wrapper
-            verilog_wrapper = "{}/{}_memstream.v".format(
-                code_gen_dir, self.onnx_node.name
+            # instantiate the hls ip
+            cmd.append(
+                "create_bd_cell -type ip -vlnv %s /%s/%s"
+                % (self.get_nodeattr("ip_vlnv"), node_name, node_name)
             )
-            copy(verilog_wrapper, verilog_folder)
-            # prepare the IP packaging tcl template
-            template = templates.ip_package_tcl
-            self.code_gen_dict["$TOPNAME$"] = [
-                "{}_memstream".format(self.onnx_node.name)
-            ]
-            self.code_gen_dict["$VERILOG_DIR$"] = [verilog_folder]
-            for key in self.code_gen_dict:
-                # transform list into long string separated by '\n'
-                code_gen_line = "\n".join(self.code_gen_dict[key])
-                template = template.replace(key, code_gen_line)
-            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
-            f = open(os.path.join(verilog_folder, "package_ip.tcl"), "w")
-            f.write(template)
-            f.close()
-            # create a shell script and call Vivado to invoke the IP pkg script
-            make_project_sh = verilog_folder + "/make_ip.sh"
-            working_dir = os.environ["PWD"]
-            with open(make_project_sh, "w") as f:
-                f.write("#!/bin/bash \n")
-                f.write("cd {}\n".format(verilog_folder))
-                f.write("vivado -mode batch -source package_ip.tcl\n")
-                f.write("cd {}\n".format(working_dir))
-            bash_command = ["bash", make_project_sh]
-            process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-            process_compile.communicate()
-            # re-set ip_path to point to the new packaged IP
-            self.set_nodeattr("ip_path", verilog_folder)
-            vlnv = "xilinx.com:hls:%s:1.0" % (
-                "{}_memstream".format(self.onnx_node.name)
+            # instantiate a streamer and connect it to the HLS IP
+            strm_vlnv = "xilinx.com:user:memstream:1.0"
+            strm_inst = node_name + "_wstrm"
+            cmd.append(
+                "create_bd_cell -type ip -vlnv %s /%s/%s"
+                % (strm_vlnv, node_name, strm_inst)
+            )
+            cmd.append(
+                "set_property -dict [list "
+                "CONFIG.NSTREAMS {1} "
+                "CONFIG.MEM_DEPTH {%d} "
+                "CONFIG.MEM_WIDTH {%d} "
+                "CONFIG.MEM_INIT {%s} "
+                "CONFIG.RAM_STYLE {%s} "
+                "CONFIG.STRM0_DEPTH {%d} "
+                "CONFIG.STRM0_WIDTH {%d} "
+                "CONFIG.STRM0_OFFSET {0} "
+                "] [get_bd_cells /%s/%s]"
+                % (
+                    self.calc_wmem(),
+                    self.get_weightstream_width_padded(),
+                    self.get_nodeattr("code_gen_dir_ipgen") + "/",
+                    self.get_nodeattr("ram_style"),
+                    self.calc_wmem(),
+                    self.get_weightstream_width_padded(),
+                    node_name,
+                    strm_inst,
+                )
             )
-            self.set_nodeattr("ip_vlnv", vlnv)
-            self.code_gen_dict.clear()
+            cmd.append(
+                "connect_bd_intf_net [get_bd_intf_pins %s/%s/m_axis_0] "
+                "[get_bd_intf_pins %s/%s/weights_V_V]"
+                % (node_name, strm_inst, node_name, node_name)
+            )
+            cmd.append(
+                "connect_bd_net [get_bd_pins %s/%s] [get_bd_pins %s/%s/aresetn]"
+                % (node_name, rst_name, node_name, strm_inst)
+            )
+            cmd.append(
+                "connect_bd_net [get_bd_pins %s/%s] [get_bd_pins %s/%s/aclk]"
+                % (node_name, clk_name, node_name, strm_inst)
+            )
+            cmd.append(
+                "connect_bd_net [get_bd_pins %s/%s] [get_bd_pins %s/%s/%s]"
+                % (node_name, rst_name, node_name, node_name, rst_name)
+            )
+            cmd.append(
+                "connect_bd_net [get_bd_pins %s/%s] [get_bd_pins %s/%s/%s]"
+                % (node_name, clk_name, node_name, node_name, clk_name)
+            )
+            cmd.append(
+                "connect_bd_intf_net [get_bd_intf_pins %s/%s] "
+                "[get_bd_intf_pins %s/%s/%s]"
+                % (node_name, din_name, node_name, node_name, din_name)
+            )
+            cmd.append(
+                "connect_bd_intf_net [get_bd_intf_pins %s/%s] "
+                "[get_bd_intf_pins %s/%s/%s]"
+                % (node_name, dout_name, node_name, node_name, dout_name)
+            )
+            cmd.append("save_bd_design")
+        elif mem_mode == "const":
+            # base class impl sufficient for const mode
+            return super().code_generation_ipi()
+        else:
+            raise Exception("Unrecognized mem_mode for StreamingFCLayer")
+        return cmd
+
+    def get_verilog_top_module_intf_names(self):
+        intf_names = super().get_verilog_top_module_intf_names()
+        mem_mode = self.get_nodeattr("mem_mode")
+        if mem_mode == "external":
+            intf_names["s_axis"] = ["in0_V_V", "weights_V_V"]
+        return intf_names
diff --git a/src/finn/custom_op/fpgadataflow/streamingfifo.py b/src/finn/custom_op/fpgadataflow/streamingfifo.py
index 66190333ce8d71dafba99aaeae4fb2c973d67410..e2f96395ad74255ad67549255608cd52737e97d9 100644
--- a/src/finn/custom_op/fpgadataflow/streamingfifo.py
+++ b/src/finn/custom_op/fpgadataflow/streamingfifo.py
@@ -29,6 +29,7 @@ import os
 import numpy as np
 from shutil import copy
 import subprocess
+import math
 
 from finn.custom_op.fpgadataflow import HLSCustomOp
 from finn.core.datatype import DataType
@@ -51,6 +52,16 @@ class StreamingFIFO(HLSCustomOp):
             "folded_shape": ("ints", True, []),
             # FINN DataTypes for inputs/outputs
             "dataType": ("s", True, ""),
+            # Toggle between rtl or IPI implementation
+            # rtl - use the hls generated IP during stitching
+            # vivado - use the AXI Infrastructure FIFO
+            "impl_style": ("s", False, "rtl"),
+            # FPGA resource type for FIFOs when impl_style is vivado
+            # auto -- let Vivado decide
+            # block -- use BRAM
+            # distributed -- use LUTRAM
+            # ultra -- use URAM (on UltraScale+)
+            "ram_style": ("s", False, "auto"),
         }
         my_attrs.update(super().get_nodeattr_types())
 
@@ -110,6 +121,8 @@ class StreamingFIFO(HLSCustomOp):
         ]
         # make instream width a multiple of 8 for axi interface
         in_width = self.get_instream_width_padded()
+        count_width = int(self.get_nodeattr("depth") - 1).bit_length()
+        self.code_gen_dict["$COUNT_RANGE$"] = ["[{}:0]".format(count_width - 1)]
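+        # illustrative example (value assumed): depth=256 -> count_width = 8 ([7:0])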
         self.code_gen_dict["$IN_RANGE$"] = ["[{}:0]".format(in_width - 1)]
         self.code_gen_dict["$OUT_RANGE$"] = ["[{}:0]".format(in_width - 1)]
         self.code_gen_dict["$WIDTH$"] = [str(in_width)]
@@ -304,3 +317,137 @@ class StreamingFIFO(HLSCustomOp):
 
     def pragmas(self):
         pass
+
+    def code_generation_ipi(self):
+        impl_style = self.get_nodeattr("impl_style")
+        if impl_style == "rtl":
+            return super().code_generation_ipi()
+        elif impl_style == "vivado":
+            cmd = []
+            node_name = self.onnx_node.name
+            depth = self.get_nodeattr("depth")
+            ram_style = self.get_nodeattr("ram_style")
+            # create a hierarchy for this layer, with the same port names
+            clk_name = self.get_verilog_top_module_intf_names()["clk"][0]
+            rst_name = self.get_verilog_top_module_intf_names()["rst"][0]
+            dout_name = self.get_verilog_top_module_intf_names()["m_axis"][0]
+            din_name = self.get_verilog_top_module_intf_names()["s_axis"][0]
+            cmd.append("create_bd_cell -type hier %s" % node_name)
+            cmd.append("create_bd_pin -dir I -type clk /%s/%s" % (node_name, clk_name))
+            cmd.append("create_bd_pin -dir I -type rst /%s/%s" % (node_name, rst_name))
+            cmd.append(
+                "create_bd_intf_pin -mode Master "
+                "-vlnv xilinx.com:interface:axis_rtl:1.0 /%s/%s"
+                % (node_name, dout_name)
+            )
+            cmd.append(
+                "create_bd_intf_pin -mode Slave "
+                "-vlnv xilinx.com:interface:axis_rtl:1.0 /%s/%s" % (node_name, din_name)
+            )
+            # instantiate and configure the AXI Stream Data FIFO
+            cmd.append(
+                "create_bd_cell -type ip "
+                "-vlnv xilinx.com:ip:axis_data_fifo:2.0 /%s/fifo" % node_name
+            )
+            cmd.append(
+                "set_property -dict [list CONFIG.FIFO_DEPTH {%d}] "
+                "[get_bd_cells /%s/fifo]" % (depth, node_name)
+            )
+            cmd.append(
+                "set_property -dict [list CONFIG.FIFO_MEMORY_TYPE {%s}] "
+                "[get_bd_cells /%s/fifo]" % (ram_style, node_name)
+            )
+            cmd.append(
+                "set_property -dict [list CONFIG.TDATA_NUM_BYTES {%d}] "
+                "[get_bd_cells /%s/fifo]"
+                % (np.ceil(self.get_outstream_width() / 8), node_name)
+            )
+            cmd.append(
+                "connect_bd_intf_net [get_bd_intf_pins %s/fifo/M_AXIS] "
+                "[get_bd_intf_pins %s/%s]" % (node_name, node_name, dout_name)
+            )
+            cmd.append(
+                "connect_bd_intf_net [get_bd_intf_pins %s/fifo/S_AXIS] "
+                "[get_bd_intf_pins %s/%s]" % (node_name, node_name, din_name)
+            )
+            cmd.append(
+                "connect_bd_net [get_bd_pins %s/%s] "
+                "[get_bd_pins %s/fifo/s_axis_aresetn]"
+                % (node_name, rst_name, node_name)
+            )
+            cmd.append(
+                "connect_bd_net [get_bd_pins %s/%s] "
+                "[get_bd_pins %s/fifo/s_axis_aclk]" % (node_name, clk_name, node_name)
+            )
+            return cmd
+        else:
+            raise Exception(
+                "FIFO implementation style %s not supported, please use rtl or vivado"
+                % impl_style
+            )
+
+    def bram_estimation(self):
+        """Calculates resource estimation for BRAM"""
+        impl = self.get_nodeattr("impl_style")
+        ram_type = self.get_nodeattr("ram_style")
+        depth = self.get_nodeattr("depth")
+        W = self.get_instream_width()
+
+        if impl == "rtl" or (impl == "vivado" and ram_type != "block"):
+            # Non-BRAM based implementation
+            return 0
+
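+        # the depth/width tiers below follow 18Kb BRAM aspect ratios
+        # (16Kx1, 8Kx2, 4Kx4, 2Kx9, 1Kx18, 512x36); illustrative example
+        # (values assumed): depth=1024, W=32 -> ceil(1024/1024) * ceil(32/18) = 2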
+        if W == 1:
+            return math.ceil(depth / 16384)
+        elif W == 2:
+            return math.ceil(depth / 8192)
+        elif W <= 4:
+            return (math.ceil(depth / 4096)) * (math.ceil(W / 4))
+        elif W <= 9:
+            return (math.ceil(depth / 2048)) * (math.ceil(W / 9))
+        elif W <= 18 or depth > 512:
+            return (math.ceil(depth / 1024)) * (math.ceil(W / 18))
+        else:
+            return (math.ceil(depth / 512)) * (math.ceil(W / 36))
+
+    def uram_estimation(self):
+        """Calculates resource estimation for URAM"""
+
+        impl = self.get_nodeattr("impl_style")
+        ram_type = self.get_nodeattr("ram_style")
+        depth = self.get_nodeattr("depth")
+        W = self.get_instream_width()
+
+        if impl == "rtl" or (impl == "vivado" and ram_type != "ultra"):
+            # Non-URAM based implementation
+            return 0
+        else:
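+            # illustrative example (values assumed): depth=2048, W=128
+            # -> ceil(2048/4096) * ceil(128/72) = 1 * 2 = 2 URAMs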
+            return (math.ceil(depth / 4096)) * (math.ceil(W / 72))
+
+    def bram_efficiency_estimation(self):
+        depth = self.get_nodeattr("depth")
+        W = self.get_instream_width()
+        bram16_est = self.bram_estimation()
+        if bram16_est == 0:
+            return 1
+        wbits = W * depth
+        bram16_est_capacity = bram16_est * 36 * 512
+        return wbits / bram16_est_capacity
+
+    def lut_estimation(self):
+        """Calculates resource estimations for LUTs"""
+        impl = self.get_nodeattr("impl_style")
+        ram_type = self.get_nodeattr("ram_style")
+        depth = self.get_nodeattr("depth")
+        W = self.get_instream_width()
+
+        address_luts = 2 * math.ceil(math.log(depth, 2))
+
+        if impl == "rtl" or (impl == "vivado" and ram_type == "distributed"):
+            ram_luts = (math.ceil(depth / 32)) * (math.ceil(W / 2))
+        else:
+            ram_luts = 0
+
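+        # illustrative example (values assumed): depth=32, W=32, distributed RAM ->
+        # address_luts = 2*5 = 10, ram_luts = ceil(32/32) * ceil(32/2) = 16, total 26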
+        return int(address_luts + ram_luts)
+
diff --git a/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py b/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py
index 2344e12f7e87634c189563f9cde7b1c861a3606e..4c772358648f402467cee628afe410d7bce83ede 100644
--- a/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py
+++ b/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py
@@ -95,6 +95,12 @@ class StreamingMaxPool_Batch(HLSCustomOp):
         folded_oshape = self.get_folded_output_shape()
         return np.prod(folded_oshape[:-1])
 
+    def get_exp_cycles(self):
+        # derived from StreamingMaxPool_Batch loop nest
+        k = self.get_nodeattr("PoolDim")
+        ifm_dim = self.get_nodeattr("ImgDim")
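+        # illustrative example (values assumed):
+        # ImgDim=32, PoolDim=2 -> 32 * (32 + 32/2) = 1536 cycles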
+        return int(ifm_dim * (ifm_dim + (ifm_dim / k)))
+
     def get_instream_width(self):
         dt_bits = self.get_input_datatype().bitwidth()
         ifm_ch = self.get_nodeattr("NumChannels")
diff --git a/src/finn/custom_op/fpgadataflow/templates.py b/src/finn/custom_op/fpgadataflow/templates.py
index 5f526aa2aa1917144c7a048c9d9314aa9288a2d8..67cce8675681be47036ffaf3a3428b8c43284215 100644
--- a/src/finn/custom_op/fpgadataflow/templates.py
+++ b/src/finn/custom_op/fpgadataflow/templates.py
@@ -99,6 +99,7 @@ set_top $config_toplevelfxn
 open_solution sol1
 set_part $config_proj_part
 
+config_compile -ignore_long_run_time -disable_unroll_code_size_check
 config_interface -m_axi_addr64
 config_rtl -auto_prefix
 $EXTRA_DIRECTIVES$
@@ -145,39 +146,6 @@ wire m_axis_0_tready;
 wire m_axis_0_tvalid;
 wire $WEIGHT_RANGE$ m_axis_0_tdata;
 
-wire m_axis_0_tready_q;
-wire m_axis_0_tvalid_q;
-wire $WEIGHT_RANGE$ m_axis_0_tdata_q;
-
-wire m_axis_0_tready_q2;
-wire m_axis_0_tvalid_q2;
-wire $WEIGHT_RANGE$ m_axis_0_tdata_q2;
-
-reg m_axis_1_afull = 0;
-reg m_axis_1_tready = 1;
-wire m_axis_1_tvalid;
-wire $WEIGHT_RANGE$ m_axis_1_tdata;
-
-reg m_axis_2_afull = 0;
-reg m_axis_2_tready = 1;
-wire m_axis_2_tvalid;
-wire $WEIGHT_RANGE$ m_axis_2_tdata;
-
-reg m_axis_3_afull = 0;
-reg m_axis_3_tready = 1;
-wire m_axis_3_tvalid;
-wire $WEIGHT_RANGE$ m_axis_3_tdata;
-
-reg m_axis_4_afull = 0;
-reg m_axis_4_tready = 1;
-wire m_axis_4_tvalid;
-wire $WEIGHT_RANGE$ m_axis_4_tdata;
-
-reg m_axis_5_afull = 0;
-reg m_axis_5_tready = 1;
-wire m_axis_5_tvalid;
-wire $WEIGHT_RANGE$ m_axis_5_tdata;
-
 //memstream component
 
 memstream
@@ -193,27 +161,12 @@ memstream
 
 //widths per stream
 .STRM0_WIDTH($WEIGHT_WIDTH$),
-.STRM1_WIDTH($WEIGHT_WIDTH$),
-.STRM2_WIDTH($WEIGHT_WIDTH$),
-.STRM3_WIDTH($WEIGHT_WIDTH$),
-.STRM4_WIDTH($WEIGHT_WIDTH$),
-.STRM5_WIDTH($WEIGHT_WIDTH$),
 
 //depths per stream
 .STRM0_DEPTH($WSTREAM_DEPTH$),
-.STRM1_DEPTH(1),
-.STRM2_DEPTH(1),
-.STRM3_DEPTH(1),
-.STRM4_DEPTH(1),
-.STRM5_DEPTH(1),
 
 //offsets for each stream
-.STRM0_OFFSET(0),
-.STRM1_OFFSET(0),
-.STRM2_OFFSET(0),
-.STRM3_OFFSET(0),
-.STRM4_OFFSET(0),
-.STRM5_OFFSET(0)
+.STRM0_OFFSET(0)
 )
 mem
 (
@@ -231,52 +184,9 @@ mem
 .m_axis_0_afull(m_axis_0_afull),
 .m_axis_0_tready(m_axis_0_tready),
 .m_axis_0_tvalid(m_axis_0_tvalid),
-.m_axis_0_tdata(m_axis_0_tdata),
-
-.m_axis_1_afull(m_axis_1_afull),
-.m_axis_1_tready(m_axis_1_tready),
-.m_axis_1_tvalid(m_axis_1_tvalid),
-.m_axis_1_tdata(m_axis_1_tdata),
-
-.m_axis_2_afull(m_axis_2_afull),
-.m_axis_2_tready(m_axis_2_tready),
-.m_axis_2_tvalid(m_axis_2_tvalid),
-.m_axis_2_tdata(m_axis_2_tdata),
-
-.m_axis_3_afull(m_axis_3_afull),
-.m_axis_3_tready(m_axis_3_tready),
-.m_axis_3_tvalid(m_axis_3_tvalid),
-.m_axis_3_tdata(m_axis_3_tdata),
-
-.m_axis_4_afull(m_axis_4_afull),
-.m_axis_4_tready(m_axis_4_tready),
-.m_axis_4_tvalid(m_axis_4_tvalid),
-.m_axis_4_tdata(m_axis_4_tdata),
+.m_axis_0_tdata(m_axis_0_tdata)
 
-.m_axis_5_afull(m_axis_5_afull),
-.m_axis_5_tready(m_axis_5_tready),
-.m_axis_5_tvalid(m_axis_5_tvalid),
-.m_axis_5_tdata(m_axis_5_tdata)
 
-
-);
-
-
-Q_srl #(
-.depth(32),
-.width($WEIGHT_WIDTH$)
-)
-$LAYER_NAME$_w_fifo_1
-(
- .clock(ap_clk),
- .reset(!ap_rst_n),
- .i_d(m_axis_0_tdata),
- .i_v(m_axis_0_tvalid),
- .i_r(m_axis_0_tready),
- .o_d(m_axis_0_tdata_q),
- .o_v(m_axis_0_tvalid_q),
- .o_r(m_axis_0_tready_q),
- .count(fifo_0_count)
 );
 
 
@@ -290,17 +200,14 @@ MVA_Stream_U
 .in0_V_V_TDATA(in0_V_V_TDATA),		//$IN_RANGE$ input
 .in0_V_V_TVALID(in0_V_V_TVALID),  	//input
 .in0_V_V_TREADY(in0_V_V_TREADY),	//output
-.weights_V_V_TDATA(m_axis_0_tdata_q),	//$WEIGHT_RANGE$ input
-.weights_V_V_TVALID(m_axis_0_tvalid_q),	//input
-.weights_V_V_TREADY(m_axis_0_tready_q),	//output
+.weights_V_V_TDATA(m_axis_0_tdata),	//$WEIGHT_RANGE$ input
+.weights_V_V_TVALID(m_axis_0_tvalid),	//input
+.weights_V_V_TREADY(m_axis_0_tready),	//output
 .out_V_V_TDATA(out_V_V_TDATA),		//$OUT_RANGE$ output
 .out_V_V_TVALID(out_V_V_TVALID),	//output
 .out_V_V_TREADY(out_V_V_TREADY)		//input
 );
 
-// programmable full threshold at 16 elements
-assign m_axis_0_afull = (fifo_0_count > 16);
-
 endmodule
 """
 
@@ -343,6 +250,7 @@ set_property supported_families { \
   virtex7 Production \
   virtexu Production \
   virtexuplus Production \
+  virtexuplusHBM Production \
   zynq Production \
   zynquplus Production \
   aartix7 Production \
@@ -408,6 +316,7 @@ strm_fifo_wrapper = """
 module $TOPNAME$(
 ap_clk,
 ap_rst_n,
+count,
 in0_V_V_TDATA,
 in0_V_V_TVALID,
 in0_V_V_TREADY,
@@ -418,6 +327,7 @@ out_V_V_TREADY
 
 input   ap_clk;
 input   ap_rst_n;
+output $COUNT_RANGE$ count;
 input  $IN_RANGE$ in0_V_V_TDATA;
 input   in0_V_V_TVALID;
 output   in0_V_V_TREADY;
@@ -433,6 +343,7 @@ $LAYER_NAME$
 (
  .clock(ap_clk),
  .reset(!ap_rst_n),
+ .count(count),
  .i_d(in0_V_V_TDATA),
  .i_v(in0_V_V_TVALID),
  .i_r(in0_V_V_TREADY),
diff --git a/src/finn/custom_op/fpgadataflow/thresholding_batch.py b/src/finn/custom_op/fpgadataflow/thresholding_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..2429bf6190f822fb4a6c988fcbb34152d5a338e0
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/thresholding_batch.py
@@ -0,0 +1,575 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from math import ceil
+import os
+
+import numpy as np
+
+from onnx import TensorProto, helper
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from finn.util.basic import interleave_matrix_outer_dim_from_partitions
+from finn.util.data_packing import (
+    npy_to_rtlsim_input,
+    numpy_to_hls_code,
+    rtlsim_output_to_npy,
+)
+from . import templates
+
+# ONNX i/o tensor shape assumptions for Thresholding:
+# input 0 is the input tensor, shape (..., NumChannels)
+# input 1 is the threshold tensor, shape (NumChannels, n_thres)
+# output 0 is the output tensor, shape (..., NumChannels) - same as input
+# the ... here can be any shape (representing groups of vectors)
+
+
+class Thresholding_Batch(HLSCustomOp):
+    """Class that corresponds to finn-hls Thresholding_Batch function."""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+        self.decoupled_wrapper = templates.decoupled_wrapper
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            "PE": ("i", True, 0),
+            "NumChannels": ("i", True, 0),
+            # string defining memory type
+            "ram_style": ("s", False, "distributed"),
+            # FINN DataTypes for inputs, weights, outputs
+            "inputDataType": ("s", True, ""),
+            "outputDataType": ("s", True, ""),
+            # input and output FIFO depths
+            "inFIFODepth": ("i", False, 0),
+            "outFIFODepth": ("i", False, 0),
+            # number of input vectors, examples:
+            # [1] is a single vector (like a FC layer with batch=1)
+            # [4] is four vectors (like a FC layer with batch=4)
+            # [1, 4, 4] is four * four vectors (like a conv layer with batch=1)
+            "numInputVectors": ("ints", False, [1]),
+            # initialization value for the thresholding accumulator
+            "ActVal": ("i", False, 0),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def calc_tmem(self):
+        """Calculates and returns TMEM."""
+        mh = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        return mh // pe
+
+    def make_shape_compatible_op(self, model):
+        oshape = self.get_normal_output_shape()
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten().astype(float),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # check input datatype against property
+        idt_name = self.get_input_datatype().name
+        exp_idt_name = self.get_nodeattr("inputDataType")
+        assert exp_idt_name == idt_name, "Bad input DataType for Thresholding layer"
+        # set output datatype from property
+        odt = self.get_output_datatype()
+        model.set_tensor_datatype(node.output[0], odt)
+
+    def verify_node(self):
+        info_messages = []
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+
+        # verify that "backend" is set to "fpgadataflow"
+        backend_value = self.get_nodeattr("backend")
+        if backend_value == "fpgadataflow":
+            info_messages.append("Attribute backend is set correctly")
+        else:
+            info_messages.append('Attribute backend should be set to "fpgadataflow"')
+
+        # verify that all necessary attributes exist
+        # TODO collect automatically from get_nodeattr_types
+        try:
+            self.get_nodeattr("code_gen_dir_cppsim")
+            self.get_nodeattr("executable_path")
+            self.get_nodeattr("NumChannels")
+            self.get_nodeattr("PE")
+            self.get_nodeattr("inputDataType")
+            self.get_nodeattr("outputDataType")
+            info_messages.append("All necessary attributes exist")
+        except Exception:
+            info_messages.append(
+                """The required Thresholding_Batch attributes do not exist."""
+            )
+
+        return info_messages
+
+    def bram_estimation(self):
+        """Calculates BRAM cost if resource set to BRAM"""
+        style = self.get_nodeattr("ram_style")
+        P = self.get_nodeattr("PE")
+        idt = self.get_input_datatype()
+        A = idt.bitwidth()
+        tmem = self.calc_tmem()
+
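+        # assumption: threshold memory is (A*P) bits wide and tmem deep,
+        # mapped to BRAM slices of 16 bits x 1024 words each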
+        if style == "block" and tmem > 1:
+            return int(ceil(A * P / 16)) * int(ceil(tmem / 1024))
+        else:
+            return 0
+
+    def lut_estimation(self):
+        """Calculates LUT cost, taking memory resource type into account."""
+        # TODO add in/out FIFO contributions
+        style = self.get_nodeattr("ram_style")
+        P = self.get_nodeattr("PE")
+        idt = self.get_input_datatype()
+        A = idt.bitwidth()
+        tmem = self.calc_tmem()
+        # cost of comparators
+        comparator_cost = A * P
+        # cost of LUTRAM
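+        # assumption: one LUT implements a 64-deep x 1-bit distributed RAM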
+        if style == "distributed" and tmem > 1:
+            lutram_cost = P * A * int(ceil(tmem / 64))
+        else:
+            lutram_cost = 0
+        # total cost
+        return comparator_cost + lutram_cost
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        return DataType[self.get_nodeattr("inputDataType")]
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output."""
+        return DataType[self.get_nodeattr("outputDataType")]
+
+    def get_instream_width(self):
+        i_bits = self.get_input_datatype().bitwidth()
+        return i_bits * self.get_nodeattr("PE")
+
+    def get_outstream_width(self):
+        o_bits = self.get_output_datatype().bitwidth()
+        return o_bits * self.get_nodeattr("PE")
+
+    def get_folded_input_shape(self):
+        ich = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        fold = ich // pe
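+        # fold: cycles to stream one input vector, PE channel elements at a time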
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        folded_input_shape = tuple(vecs + [fold, pe])
+        return folded_input_shape
+
+    def get_folded_output_shape(self):
+        # same shape as input
+        return self.get_folded_input_shape()
+
+    def get_normal_input_shape(self):
+        ich = self.get_nodeattr("NumChannels")
+        vecs = list(self.get_nodeattr("numInputVectors"))
+        normal_input_shape = tuple(vecs + [ich])
+        return normal_input_shape
+
+    def get_normal_output_shape(self):
+        # same shape as input
+        return self.get_normal_input_shape()
+
+    def get_number_output_values(self):
+        nf = np.prod(self.get_folded_output_shape()[:-1])
+        return nf
+
+    def get_exp_cycles(self):
+        # Channels/PE * batch size * fmdim * fmdim
+        return np.prod(self.get_folded_output_shape()[:-1])
+
+    def get_template_param_values(self):
+        """Returns the template parameter values according to input and output
+        data types."""
+        ret = dict()
+        inp_hls_str = self.get_input_datatype().get_hls_datatype_str()
+        out_hls_str = self.get_output_datatype().get_hls_datatype_str()
+        # fill in TSrcI
+        ret["TSrcI"] = "Slice<%s>" % inp_hls_str
+        # fill in TDstI
+        ret["TDstI"] = "Slice<%s>" % out_hls_str
+
+        return ret
+
+    def get_hls_compatible_threshold_tensor(self, orig_thres_matrix):
+        """Convert the original numpy threshold matrix orig_thres_matrix into
+        a form suitable for passing to the hlslib call:
+        * ensure MH % PE == 0
+        * for unsigned inputs, ensure thresholds are nonnegative
+        * interleave rows between PEs
+        * reshape into (PE, TMEM, n_thres_steps) and return
+        """
+        mh = self.get_nodeattr("NumChannels")
+        pe = self.get_nodeattr("PE")
+        tmem = mh // pe
+        assert mh % pe == 0, "Requirement NumChannels divisible by PE is violated."
+        assert (
+            orig_thres_matrix.ndim == 2
+        ), """Threshold matrix dimension is
+        not as expected (2)."""
+        n_thres_steps = orig_thres_matrix.shape[1]
+        if not self.get_input_datatype().signed():
+            # ensure all thresholds are nonnegative
+            assert (orig_thres_matrix >= 0).all()
+        # ensure all thresholds are integer
+        assert (orig_thres_matrix.astype(np.int32) == orig_thres_matrix).all()
+        ret = orig_thres_matrix
+        # ensure channels = mh, duplicating if necessary
+        if ret.shape[0] == 1:
+            ret = np.tile(ret, (mh, 1))
+        assert (
+            ret.shape[0] == mh
+        ), "Channels of threshold matrix are not as expected (mh)"
+        # distribute rows between PEs
+        ret = interleave_matrix_outer_dim_from_partitions(ret, pe)
+        assert (
+            ret.shape[0] == pe
+        ), """First dimension after distribution of the
+        rows between PEs is not as expected (pe)"""
+        assert (
+            ret.shape[1] == tmem
+        ), """Second dimension after distribution of the
+        rows between PEs is not as expected (tmem)"""
+        assert (
+            ret.shape[2] == n_thres_steps
+        ), """Third dimension after distribution of the
+        rows between PEs is not as expected (n_thres_steps)"""
+        return ret.reshape(1, pe, tmem, n_thres_steps)
+
+    def generate_params(self, model, path):
+        code_gen_dir = path
+        # save thresholds in thresh.h
+        thresholds = model.get_initializer(self.onnx_node.input[1])
+
+        threshold_tensor = self.get_hls_compatible_threshold_tensor(thresholds)
+
+        min_threshold = thresholds.min()
+        max_threshold = thresholds.max()
+        min_input = self.get_input_datatype().min()
+        max_input = self.get_input_datatype().max()
+        # get range required by threshold values
+        tdt_min = min(min_input, min_threshold)
+        tdt_max = max(max_input, max_threshold)
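+        # pick the smallest FINN DataType covering both inputs and thresholds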
+        if tdt_min < 0:
+            if abs(tdt_min) > tdt_max:
+                tdt = DataType.get_smallest_possible(tdt_min)
+            else:
+                tdt = DataType.get_smallest_possible(0 - tdt_max - 1)
+        else:
+            tdt = DataType.get_smallest_possible(tdt_max)
+        assert np.vectorize(tdt.allowed)(
+            threshold_tensor
+        ).all(), "Thresholds can't be expressed with type %s" % str(tdt)
+
+        thresholds_hls_code = numpy_to_hls_code(
+            threshold_tensor, tdt, "thresholds", False, True
+        )
+        # write thresholds into thresh.h
+        f_thresh = open("{}/thresh.h".format(code_gen_dir), "w")
+        tdt_hls = tdt.get_hls_datatype_str()
+        # use binary to export bipolar activations
+        export_odt = self.get_output_datatype()
+        if self.get_output_datatype() == DataType.BIPOLAR:
+            export_odt = DataType.BINARY
+        odt_hls = export_odt.get_hls_datatype_str()
+        f_thresh.write(
+            "static ThresholdsActivation<{},{},{},{},{},{},{}> threshs \
+            = ".format(
+                self.calc_tmem(),
+                self.get_nodeattr("PE"),
+                threshold_tensor.shape[-1],
+                tdt_hls,
+                odt_hls,
+                self.get_nodeattr("ActVal"),
+                "std::less_equal<%s>" % tdt_hls,
+            )
+        )
+        f_thresh.write(thresholds_hls_code)
+        f_thresh.close()
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+
+        # TODO ensure codegen dir exists
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        # create an npy file for each input of the node (in_ind is input index)
+        in_ind = 0
+        for inputs in node.input:
+            # it is assumed that the first input of the node is the data input
+            # and the second input holds the thresholds
+            if in_ind == 0:
+                assert (
+                    str(context[inputs].dtype) == "float32"
+                ), """Input datatype is
+                not float32 as expected."""
+                expected_inp_shape = self.get_folded_input_shape()
+                reshaped_input = context[inputs].reshape(expected_inp_shape)
+                if self.get_input_datatype() == DataType.BIPOLAR:
+                    # store bipolar activations as binary
+                    reshaped_input = (reshaped_input + 1) / 2
+                    export_idt = DataType.BINARY
+                else:
+                    export_idt = self.get_input_datatype()
+                # make copy before saving the array
+                reshaped_input = reshaped_input.copy()
+                np.save(
+                    os.path.join(code_gen_dir, "input_{}.npy".format(in_ind)),
+                    reshaped_input,
+                )
+            elif in_ind > 2:
+                raise Exception("Unexpected input found for Thresholding_Batch")
+            in_ind += 1
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            # reinterpret binary output as bipolar where needed
+            if self.get_output_datatype() == DataType.BIPOLAR:
+                out = context[node.output[0]]
+                out = 2 * out - 1
+                context[node.output[0]] = out
+            assert (
+                context[node.output[0]].shape == self.get_folded_output_shape()
+            ), """Output shape is not as expected"""
+            # reshape output to have expected shape
+            oshape = self.get_normal_output_shape()
+            context[node.output[0]] = context[node.output[0]].reshape(*oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            inp = npy_to_rtlsim_input(
+                "{}/input_0.npy".format(code_gen_dir), export_idt, nbits
+            )
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            output = self.rtlsim(sim, inp)
+            odt = self.get_output_datatype()
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+
+            # load and reshape output
+            output = np.load(out_npy_path)
+            oshape = self.get_normal_output_shape()
+            output = np.asarray([output], dtype=np.float32).reshape(*oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "activations.hpp"']
+        self.code_gen_dict["$GLOBALS$"] += ['#include "thresh.h"']
+
+    # TODO check and add whatever is missing
+    def defines(self, var):
+        numInputVectors = list(self.get_nodeattr("numInputVectors"))
+        numReps = numInputVectors[0]
+        self.code_gen_dict["$DEFINES$"] = [
+            """#define NumChannels1 {}\n #define PE1 {}\n #define numReps {}""".format(
+                self.get_nodeattr("NumChannels"), self.get_nodeattr("PE"), numReps,
+            )
+        ]
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+        # note: the innermost dim is reversed for the input
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0, false);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        tmpl_args = self.get_template_param_values()
+        # TODO: why put some template parameters into defines and not others?
+        # should ImgDim be defined or just filled in here like we do now?
+        node = self.onnx_node
+        ishape = self.get_folded_input_shape()
+        if len(ishape) == 3:
+            imgdim = 1
+        elif len(ishape) == 5:
+            imgdim = ishape[1]
+        else:
+            raise Exception("""Unexpected input shape""")
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """{}<{}, NumChannels1, PE1, {}, {}>
+            (in0, out, threshs, numReps);""".format(
+                node.op_type, imgdim, tmpl_args["TSrcI"], tmpl_args["TDstI"],
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        if dtype == DataType.BIPOLAR:
+            # use binary for bipolar storage
+            dtype = DataType.BINARY
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        shape = self.get_folded_output_shape()
+        shape_cpp_str = str(shape).replace("(", "{").replace(")", "}")
+
+        # note: the innermost dim is not reversed for the output
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s", false);'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                shape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            """void {}(hls::stream<ap_uint<{}>> &in0,
+                hls::stream<ap_uint<{}>> &out
+                )""".format(
+                self.onnx_node.name,
+                self.get_instream_width(),
+                self.get_outstream_width(),
+            )
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
+
+        # the threshold tensor is acc_type [PE][TMEM][N_THRES]
+        # partition for parallel access along PE and N_THRES
+        # dimensions (dims 1 and 3)
+        self.code_gen_dict["$PRAGMAS$"].append(
+            (
+                "#pragma HLS ARRAY_PARTITION variable=threshs.m_thresholds "
+                "complete dim=1"
+            )
+        )
+        self.code_gen_dict["$PRAGMAS$"].append(
+            (
+                "#pragma HLS ARRAY_PARTITION variable=threshs.m_thresholds "
+                "complete dim=3"
+            )
+        )
+        # set resource type
+        ram_style = self.get_nodeattr("ram_style")
+        pe = self.get_nodeattr("PE")
+        ich = self.get_nodeattr("NumChannels")
+        # if PE is less than NumChannels, assign cores according to ram_style;
+        # otherwise if PE == NumChannels, Vivado HLS will unroll to FFs
+        if pe < ich:
+            if ram_style == "distributed":
+                self.code_gen_dict["$PRAGMAS$"].append(
+                    (
+                        "#pragma HLS RESOURCE variable=threshs.m_thresholds "
+                        "core=ROM_2P_LUTRAM"
+                    )
+                )
+            elif ram_style == "block":
+                self.code_gen_dict["$PRAGMAS$"].append(
+                    (
+                        "#pragma HLS RESOURCE variable=threshs.m_thresholds "
+                        "core=ROM_2P_BRAM"
+                    )
+                )
+            else:
+                raise Exception(
+                    """Invalid value for attribute ram_style! Is currently set to: {}
+                has to be set to one of ("block", "distributed")""".format(
+                        ram_style
+                    )
+                )
diff --git a/src/finn/custom_op/fpgadataflow/tlastmarker.py b/src/finn/custom_op/fpgadataflow/tlastmarker.py
index 25ea05e3607a52731ae1b64de421837bf137ee2b..38a139c279701ae7892f41b63c3c717a3e736691 100644
--- a/src/finn/custom_op/fpgadataflow/tlastmarker.py
+++ b/src/finn/custom_op/fpgadataflow/tlastmarker.py
@@ -30,20 +30,31 @@ from finn.custom_op.fpgadataflow import HLSCustomOp
 
 
 class TLastMarker(HLSCustomOp):
-    """Class that corresponds to the TLastMarker node that needs to be
-    inserted at the end of the model for rtlsim with stitched IP.
-    It marks the end of the current image/input sample."""
+    """Node that adds/removes AXI stream TLAST signals where needed. Its behavior
+    is transparent in node-by-node execution, only visible in IP-stitched rtlsim or
+    actual hardware.
+    This node may be needed at the end of the network to signal a DMA write
+    (needed by the FINN PYNQ shell) or at the beginning to remove the end-of-burst
+    from DMA read."""
 
     def __init__(self, onnx_node):
         super().__init__(onnx_node)
 
     def get_nodeattr_types(self):
         my_attrs = {
+            # number of (static) iterations until TLAST=1 is generated for Direction=out
             "NumIters": ("i", True, 0),
+            # whether a static or dynamic (from AXI lite) number of iterations is used
+            "DynIters": ("i", False, 1),
+            # direction: whether to insert or remove TLAST
+            "Direction": ("s", False, "out"),
             # width of input-output data streams, in bits
             "StreamWidth": ("i", True, 0),
             # width of individual element in stream, in bits
             "ElemWidth": ("i", True, 0),
+            # Protocol: external or internal
+            # Vitis docs recommend using qdma_axis for external, ap_axiu for internal
+            "Protocol": ("s", False, "external"),
         }
         my_attrs.update(super().get_nodeattr_types())
         return my_attrs
@@ -76,12 +87,33 @@ class TLastMarker(HLSCustomOp):
 
     def defines(self, var):
         stream_width = self.get_nodeattr("StreamWidth")
+        direction = self.get_nodeattr("Direction")
+        protocol = self.get_nodeattr("Protocol")
         # output stream must have TLAST, so we use this stream data type:
         # qdma_axis<stream_data_width,0,0,0 >
-        out_stream_dtype = "qdma_axis<%d,0,0,0>" % stream_width
+        if direction == "out":
+            if protocol == "external":
+                out_stream_dtype = "qdma_axis<%d,0,0,0>" % stream_width
+            elif protocol == "internal":
+                out_stream_dtype = "ap_axiu<%d,0,0,0>" % stream_width
+            else:
+                raise Exception("Unrecognized Protocol in TLastMarker")
+            in_stream_dtype = "ap_uint<%d>" % stream_width
+        elif direction == "in":
+            out_stream_dtype = "ap_uint<%d>" % stream_width
+            if protocol == "external":
+                in_stream_dtype = "qdma_axis<%d,0,0,0>" % stream_width
+            elif protocol == "internal":
+                in_stream_dtype = "ap_axiu<%d,0,0,0>" % stream_width
+            else:
+                raise Exception("Unrecognized Protocol in TLastMarker")
+        else:
+            raise Exception("Unrecognized Direction in TLastMarker")
+
         self.code_gen_dict["$DEFINES$"] = [
             "#define StreamWidth %d" % stream_width,
             "#define OutDType %s" % out_stream_dtype,
+            "#define InDType %s" % in_stream_dtype,
             "#define NumItersPerImg %d" % self.get_nodeattr("NumIters"),
         ]
 
@@ -89,27 +121,60 @@ class TLastMarker(HLSCustomOp):
         self.code_gen_dict["$READNPYDATA$"] = []
 
     def docompute(self):
-        self.code_gen_dict["$DOCOMPUTE$"] = [
-            "unsigned int n = 1;",
-            "OutDType t;",
-            "t.set_keep(-1);",
-            "io_section: { // start of cycle accurate region",
-            "#pragma HLS protocol fixed",
-            "// do a first read from stream before we decide on numIters",
-            "// giving software a chance to set up the numIters prior to startup",
-            "t.set_data(in0.read());",
-            "n = (numIters == 0 ? NumItersPerImg : numIters);",
-            "t.set_last(n==1);",
-            "out.write(t);",
-            "} // end of cycle accurate region",
-            "// do one less iteration than spec since we already did one",
-            "for(unsigned int i=1; i<n; i++) {",
-            "#pragma HLS PIPELINE II=1",
-            "t.set_data(in0.read());",
-            "t.set_last(i==(n-1));",
-            "out.write(t);",
-            "}",
-        ]
+        dyn_iters = self.get_nodeattr("DynIters")
+        direction = self.get_nodeattr("Direction")
+        use_qdma_axis = self.get_nodeattr("Protocol") == "external"
+        if direction == "in":
+            # read from input and just pass data along; ignore tlast
+            # no dyn iters on input, it doesn't make sense
+            self.code_gen_dict["$DOCOMPUTE$"] = [
+                "for(unsigned int i=0; i<NumItersPerImg; i++) {",
+                "#pragma HLS PIPELINE II=1",
+                "out.write(in0.read().get_data());"
+                if use_qdma_axis
+                else "out.write(in0.read().data);",
+                "}",
+            ]
+
+        elif dyn_iters == 1:
+            # output, with dynamic iteration counts
+            self.code_gen_dict["$DOCOMPUTE$"] = [
+                "unsigned int n = 1;",
+                "OutDType t;",
+                "t.set_keep(-1);" if use_qdma_axis else "t.keep = -1;",
+                "io_section: { // start of cycle accurate region",
+                "#pragma HLS protocol fixed",
+                "// do a first read from stream before we decide on numIters",
+                "// giving software a chance to set up the numIters prior to startup",
+                "t.set_data(in0.read());" if use_qdma_axis else "t.data = in0.read();",
+                "n = (numIters == 0 ? NumItersPerImg : numIters);",
+                "t.set_last(n==1);" if use_qdma_axis else "t.last = (n==1);",
+                "out.write(t);",
+                "} // end of cycle accurate region",
+                "// do one less iteration than spec since we already did one",
+                "for(unsigned int i=1; i<n; i++) {",
+                "#pragma HLS PIPELINE II=1",
+                "t.set_data(in0.read());" if use_qdma_axis else "t.data = in0.read();",
+                "t.set_last(i==(n-1));" if use_qdma_axis else "t.last = (i==(n-1));",
+                "out.write(t);",
+                "}",
+            ]
+
+        else:
+            # output, with static iteration counts
+            self.code_gen_dict["$DOCOMPUTE$"] = [
+                "unsigned int n = 1;",
+                "OutDType t;",
+                "t.set_keep(-1);" if use_qdma_axis else "t.keep = -1;",
+                "for(unsigned int i=0; i<NumItersPerImg; i++) {",
+                "#pragma HLS PIPELINE II=1",
+                "t.set_data(in0.read());" if use_qdma_axis else "t.data = in0.read();",
+                "t.set_last(i==(NumItersPerImg-1));"
+                if use_qdma_axis
+                else "t.last = (i==(NumItersPerImg-1));",
+                "out.write(t);",
+                "}",
+            ]
 
     def dataoutstrm(self):
         self.code_gen_dict["$DATAOUTSTREAM$"] = []
@@ -118,18 +183,30 @@ class TLastMarker(HLSCustomOp):
         self.code_gen_dict["$SAVEASCNPY$"] = []
 
     def blackboxfunction(self):
-        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
-            """void %s(hls::stream<ap_uint<StreamWidth> > &in0,
-                hls::stream<OutDType> &out, unsigned int numIters)"""
-            % self.onnx_node.name
-        ]
+        dyn_iters = self.get_nodeattr("DynIters")
+
+        if dyn_iters == 1:
+            self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+                """void %s(hls::stream<InDType> &in0,
+                    hls::stream<OutDType> &out, unsigned int numIters)"""
+                % self.onnx_node.name
+            ]
+        else:
+            self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+                """void %s(hls::stream<InDType> &in0, hls::stream<OutDType> &out)"""
+                % self.onnx_node.name
+            ]
 
     def pragmas(self):
         self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
         self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
-        self.code_gen_dict["$PRAGMAS$"].append(
-            "#pragma HLS INTERFACE s_axilite port=numIters bundle=control"
-        )
+
+        dyn_iters = self.get_nodeattr("DynIters")
+        if dyn_iters == 1:
+            self.code_gen_dict["$PRAGMAS$"].append(
+                "#pragma HLS INTERFACE s_axilite port=numIters bundle=control"
+            )
+
         self.code_gen_dict["$PRAGMAS$"].append(
             "#pragma HLS INTERFACE ap_ctrl_none port=return"
         )
@@ -158,8 +235,20 @@ class TLastMarker(HLSCustomOp):
     def strm_decl(self):
         self.code_gen_dict["$STREAMDECLARATIONS$"] = []
         self.code_gen_dict["$STREAMDECLARATIONS$"].append(
-            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+            'hls::stream<InDType> in0 ("in0");'
         )
         self.code_gen_dict["$STREAMDECLARATIONS$"].append(
             'hls::stream<OutDType> out ("out");'
         )
+
+    def get_verilog_top_module_intf_names(self):
+        intf_names = super().get_verilog_top_module_intf_names()
+        if self.get_nodeattr("Direction") == "in":
+            intf_names["s_axis"] = ["in0"]
+            intf_names["m_axis"] = ["out_V_V"]
+        else:
+            intf_names["s_axis"] = ["in0_V_V"]
+            intf_names["m_axis"] = ["out_r"]
+        if self.get_nodeattr("DynIters") == 1:
+            intf_names["axilite"] = ["s_axi_control"]
+        return intf_names
diff --git a/src/finn/custom_op/fpgadataflow/vector_vector_activate_batch.py b/src/finn/custom_op/fpgadataflow/vector_vector_activate_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..c7c08d081a04ff72ae2a198e65091d042bd8d599
--- /dev/null
+++ b/src/finn/custom_op/fpgadataflow/vector_vector_activate_batch.py
@@ -0,0 +1,506 @@
+import os
+import numpy as np
+
+from onnx import TensorProto, helper
+from finn.core.datatype import DataType
+from finn.custom_op.fpgadataflow import HLSCustomOp
+from finn.util.basic import interleave_matrix_outer_dim_from_partitions
+from finn.util.data_packing import (
+    npy_to_rtlsim_input,
+    numpy_to_hls_code,
+    rtlsim_output_to_npy,
+)
+
+
+class Vector_Vector_Activate_Batch(HLSCustomOp):
+    """Class that corresponds to finn-hlslib Vector_Vector_Activate_Batch function"""
+
+    def __init__(self, onnx_node):
+        super().__init__(onnx_node)
+
+    def get_nodeattr_types(self):
+        my_attrs = {
+            "PE": ("i", True, 0),
+            "Dim": ("i", True, 0),
+            "Channels": ("i", True, 0),
+            "Kernel": ("i", True, 0),
+            "resType": ("s", True, ""),
+            "ActVal": ("i", False, 0),
+            # FINN DataTypes for inputs, weights, outputs
+            "inputDataType": ("s", True, ""),
+            "weightDataType": ("s", True, ""),
+            "outputDataType": ("s", True, ""),
+            # no-activation mode (produce accumulators)
+            "noActivation": ("i", False, 0),
+        }
+        my_attrs.update(super().get_nodeattr_types())
+        return my_attrs
+
+    def calc_wmem(self):
+        """Calculates and returns WMEM."""
+        ch = self.get_nodeattr("Channels")
+        k = self.get_nodeattr("Kernel")
+        pe = self.get_nodeattr("PE")
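+        # depthwise conv: each PE stores k*k weights for its ch/pe channels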
+        wmem = k * k * ch // pe
+        return wmem
+
+    def calc_tmem(self):
+        """Calculates and returns TMEM."""
+        if self.get_nodeattr("noActivation") == 1:
+            return 0
+        else:
+            ch = self.get_nodeattr("Channels")
+            pe = self.get_nodeattr("PE")
+            return ch // pe
+
+    def make_shape_compatible_op(self, model):
+        oshape = self.get_normal_output_shape()
+        # implement tensor with correct shape
+        values = np.random.randn(*oshape).astype(np.float32)
+        return helper.make_node(
+            "Constant",
+            inputs=[],
+            outputs=[self.onnx_node.output[0]],
+            value=helper.make_tensor(
+                name="const_tensor",
+                data_type=TensorProto.FLOAT,
+                dims=values.shape,
+                vals=values.flatten().astype(float),
+            ),
+        )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        # check input datatype against property
+        idt_name = self.get_input_datatype().name
+        exp_idt_name = self.get_nodeattr("inputDataType")
+        assert exp_idt_name == idt_name, "Bad input DataType for VVAU node"
+        # set output datatype from property
+        odt = self.get_output_datatype()
+        model.set_tensor_datatype(node.output[0], odt)
+
+    def verify_node(self):
+        pass
+
+    def get_input_datatype(self):
+        """Returns FINN DataType of input."""
+        return DataType[self.get_nodeattr("inputDataType")]
+
+    def get_weight_datatype(self):
+        """Returns FINN DataType of weights."""
+        return DataType[self.get_nodeattr("weightDataType")]
+
+    def get_output_datatype(self):
+        """Returns FINN DataType of output."""
+        return DataType[self.get_nodeattr("outputDataType")]
+
+    def get_instream_width(self):
+        i_bits = self.get_input_datatype().bitwidth()
+        in_width = i_bits * self.get_nodeattr("PE")
+        return in_width
+
+    def get_outstream_width(self):
+        o_bits = self.get_output_datatype().bitwidth()
+        out_width = o_bits * self.get_nodeattr("PE")
+        return out_width
+
+    def get_folded_input_shape(self):
+        k = self.get_nodeattr("Kernel")
+        sf = k * k
+        dim = self.get_nodeattr("Dim")
+        ch = self.get_nodeattr("Channels")
+        pe = self.get_nodeattr("PE")
+        nf = ch // pe
+        folded_input_shape = tuple([1, dim, dim, sf * nf, pe])
+        return folded_input_shape
+
+    def get_folded_output_shape(self):
+        ch = self.get_nodeattr("Channels")
+        pe = self.get_nodeattr("PE")
+        nf = ch // pe
+        dim = self.get_nodeattr("Dim")
+        folded_output_shape = tuple([1, dim, dim, nf, pe])
+        return folded_output_shape
+
+    def get_normal_input_shape(self):
+        dim = self.get_nodeattr("Dim")
+        ch = self.get_nodeattr("Channels")
+        k = self.get_nodeattr("Kernel")
+        normal_input_shape = tuple([1, dim, dim, k * k * ch])
+        return normal_input_shape
+
+    def get_normal_output_shape(self):
+        ch = self.get_nodeattr("Channels")
+        dim = self.get_nodeattr("Dim")
+        normal_output_shape = tuple([1, dim, dim, ch])
+        return normal_output_shape
+
+    def get_number_output_values(self):
+        nf = np.prod(self.get_folded_output_shape()[:-1])
+        return nf
+
+    def get_exp_cycles(self):
+        pe = self.get_nodeattr("PE")
+        ch = self.get_nodeattr("Channels")
+        dim = self.get_nodeattr("Dim")
+        k = self.get_nodeattr("Kernel")
+        # currently FINN only supports a batch size of 1 for the VVAU
+        batch_size = 1
+        # since mmv != 1 is not supported yet, we set mmv to 1 for now
+        mmv = 1
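+        # e.g. ch=32, k=3, pe=4, dim=8: (32*3*3/4) * 1 * (8*8) / 1 = 4608 cycles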
+        exp_cycles = ((ch * k * k) / pe) * batch_size * (dim * dim) / mmv
+        return int(exp_cycles)
+
+    def get_template_param_values(self):
+        """Returns the template parameter values according to input, output and weight
+        data types."""
+        ret = dict()
+        inp_hls_str = self.get_input_datatype().get_hls_datatype_str()
+        out_hls_str = self.get_output_datatype().get_hls_datatype_str()
+        inp_is_bipolar = self.get_input_datatype() == DataType.BIPOLAR
+        wt_is_bipolar = self.get_weight_datatype() == DataType.BIPOLAR
+        # fill in TSrcI and TWeightI
+        # TODO handle bipolar inputs
+        if inp_is_bipolar or wt_is_bipolar:
+            raise Exception("VVAU node doesn't support bipolar values yet.")
+        else:
+            ret["TSrcI"] = "Slice<%s>" % inp_hls_str
+            ret["TWeightI"] = "Identity"
+
+        # fill in TDstI
+        ret["TDstI"] = "Slice<%s>" % out_hls_str
+
+        return ret
+
+    def get_hls_compatible_weight_tensor(self, orig_weight_matrix):
+        pe = self.get_nodeattr("PE")
+        ch = self.get_nodeattr("Channels")
+        k = self.get_nodeattr("Kernel")
+        wmem = self.calc_wmem()
+        assert orig_weight_matrix.shape == (
+            ch,
+            1,
+            k,
+            k,
+        ), """Weights matrix doesn't
+        have expected shape (channels, 1, kernel_size, kernel_size)"""
+        ret = orig_weight_matrix
+        ret = ret.reshape(ch, k * k)
+        # distribute rows between PEs
+        ret = interleave_matrix_outer_dim_from_partitions(ret, pe)
+        ret = ret.reshape(1, pe, wmem, 1)
+        return ret
+
+    def get_hls_compatible_threshold_tensor(self, orig_thres_matrix):
+        ch = self.get_nodeattr("Channels")
+        pe = self.get_nodeattr("PE")
+        tmem = self.calc_tmem()
+        assert ch % pe == 0, "Requirement Channels divisable by PE is violated."
+        assert (
+            orig_thres_matrix.ndim == 2
+        ), """Threshold matrix dimension is
+        not as expected (2)."""
+        n_thres_steps = orig_thres_matrix.shape[1]
+        ret = orig_thres_matrix
+        # distribute rows between PEs
+        ret = interleave_matrix_outer_dim_from_partitions(ret, pe)
+        assert (
+            ret.shape[0] == pe
+        ), """First dimension after distribution of the
+        rows between PEs is not as expected (pe)"""
+        assert (
+            ret.shape[1] == tmem
+        ), """Second dimension after distribution of the
+        rows between PEs is not as expected (tmem)"""
+        assert (
+            ret.shape[2] == n_thres_steps
+        ), """Third dimension after distribution of the
+        rows between PEs is not as expected (n_thres_steps)"""
+        return ret.reshape(1, pe, tmem, n_thres_steps)
+
+    def generate_params(self, model, path):
+        # weights
+        weights = model.get_initializer(self.onnx_node.input[1])
+        # convert weights into hlslib-compatible format
+        weight_tensor = self.get_hls_compatible_weight_tensor(weights)
+        wdt = self.get_weight_datatype()
+        code_gen_dir = path
+
+        # save weights into params.h in hlslib-compatible format
+        weight_hls_code = numpy_to_hls_code(weight_tensor, wdt, "weights", True, True)
+        f_weights = open("{}/params.h".format(code_gen_dir), "w")
+
+        if wdt.bitwidth() != 1:
+            f_weights.write(
+                "const FixedPointWeights<1,{},{},{}> weights = ".format(
+                    wdt.get_hls_datatype_str(),
+                    self.get_nodeattr("PE"),
+                    self.calc_wmem(),
+                )
+            )
+        else:
+            f_weights.write(
+                "const BinaryWeights<1,{},{}> weights = ".format(
+                    self.get_nodeattr("PE"), self.calc_wmem()
+                )
+            )
+        f_weights.write(weight_hls_code)
+        f_weights.close()
+
+        # save thresholds in thresh.h
+        if len(self.onnx_node.input) > 2:
+            thresholds = model.get_initializer(self.onnx_node.input[2])
+            if thresholds is not None:
+                threshold_tensor = self.get_hls_compatible_threshold_tensor(thresholds)
+                tdt = DataType.INT32
+                assert np.vectorize(tdt.allowed)(
+                    threshold_tensor
+                ).all(), "Thresholds are not int"
+                thresholds_hls_code = numpy_to_hls_code(
+                    threshold_tensor, tdt, "thresholds", False, True
+                )
+                # write thresholds into thresh.h
+                f_thresh = open("{}/thresh.h".format(code_gen_dir), "w")
+                tdt_hls = tdt.get_hls_datatype_str()
+                odt = self.get_output_datatype()
+                odt_hls = odt.get_hls_datatype_str()
+                f_thresh.write(
+                    "static ThresholdsActivation<{},{},{},{},{},{},{}> threshs \
+                    = ".format(
+                        self.calc_tmem(),
+                        self.get_nodeattr("PE"),
+                        threshold_tensor.shape[-1],
+                        tdt_hls,
+                        odt_hls,
+                        self.get_nodeattr("ActVal"),
+                        "std::less_equal<%s>" % tdt_hls,
+                    )
+                )
+                f_thresh.write(thresholds_hls_code)
+                f_thresh.close()
+
+    def execute_node(self, context, graph):
+        mode = self.get_nodeattr("exec_mode")
+        node = self.onnx_node
+
+        # TODO ensure codegen dir exists
+        if mode == "cppsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        elif mode == "rtlsim":
+            code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+        # create an npy file for each input of the node (in_ind is input index)
+        in_ind = 0
+        for inputs in node.input:
+            # it is assumed that the first input of the node is the data input
+            # the second input are the weights
+            # the third input are the thresholds
+            if in_ind == 0:
+                assert (
+                    str(context[inputs].dtype) == "float32"
+                ), """Input datatype is
+                not float32 as expected."""
+                expected_inp_shape = self.get_folded_input_shape()
+                reshaped_input = context[inputs].reshape(expected_inp_shape)
+                # make copy before saving the array
+                reshaped_input = reshaped_input.copy()
+                np.save(
+                    os.path.join(code_gen_dir, "input_{}.npy".format(in_ind)),
+                    reshaped_input,
+                )
+            elif in_ind > 2:
+                raise Exception(
+                    "Unexpected input found for Vector_Vector_Activate_Batch"
+                )
+            in_ind += 1
+
+        if mode == "cppsim":
+            # execute the precompiled model
+            super().exec_precompiled_singlenode_model()
+            # load output npy file
+            super().npy_to_dynamic_output(context)
+            assert (
+                context[node.output[0]].shape == self.get_folded_output_shape()
+            ), """Output shape is not as expected"""
+            # reshape output to have expected shape
+            oshape = self.get_normal_output_shape()
+            context[node.output[0]] = context[node.output[0]].reshape(*oshape)
+        elif mode == "rtlsim":
+            sim = self.get_rtlsim()
+            nbits = self.get_instream_width()
+            idt = self.get_input_datatype()
+            inp = npy_to_rtlsim_input("{}/input_0.npy".format(code_gen_dir), idt, nbits)
+            super().reset_rtlsim(sim)
+            super().toggle_clk(sim)
+            output = self.rtlsim(sim, inp)
+            odt = self.get_output_datatype()
+            target_bits = odt.bitwidth()
+            packed_bits = self.get_outstream_width()
+            out_npy_path = "{}/output.npy".format(code_gen_dir)
+            out_shape = self.get_folded_output_shape()
+            rtlsim_output_to_npy(
+                output, out_npy_path, odt, out_shape, packed_bits, target_bits
+            )
+
+            # load and reshape output
+            output = np.load(out_npy_path)
+            oshape = self.get_normal_output_shape()
+            output = np.asarray([output], dtype=np.float32).reshape(*oshape)
+            context[node.output[0]] = output
+        else:
+            raise Exception(
+                """Invalid value for attribute exec_mode! Is currently set to: {}
+            has to be set to one of the following values ("cppsim", "rtlsim")""".format(
+                    mode
+                )
+            )
+
+    def global_includes(self):
+        self.code_gen_dict["$GLOBALS$"] = ['#include "weights.hpp"']
+        self.code_gen_dict["$GLOBALS$"] += ['#include "activations.hpp"']
+        if self.calc_tmem() != 0:
+            self.code_gen_dict["$GLOBALS$"] += ['#include "thresh.h"']
+
+    def defines(self, var):
+        dim = self.get_nodeattr("Dim")
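+        # one kernel application per output pixel; batch size is fixed to 1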
+        numReps = 1 * dim * dim
+        self.code_gen_dict["$DEFINES$"] = [
+            """#define Channels1 {}\n #define Kernel1 {}\n
+            #define SIMD1 1\n #define PE1 {}\n #define numReps {}""".format(
+                self.get_nodeattr("Channels"),
+                self.get_nodeattr("Kernel"),
+                self.get_nodeattr("PE"),
+                numReps,
+            )
+        ]
+
+    def read_npy_data(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_input_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_instream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_in = "%s/input_0.npy" % code_gen_dir
+        self.code_gen_dict["$READNPYDATA$"] = []
+        # note: the innermost dim is reversed for the input
+        self.code_gen_dict["$READNPYDATA$"].append(
+            'npy2apintstream<%s, %s, %d, %s>("%s", in0, false);'
+            % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in)
+        )
+
+    def strm_decl(self):
+        self.code_gen_dict["$STREAMDECLARATIONS$"] = []
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width())
+        )
+        self.code_gen_dict["$STREAMDECLARATIONS$"].append(
+            'hls::stream<ap_uint<{}>> out ("out");'.format(self.get_outstream_width())
+        )
+
+    def docompute(self):
+        tmpl_args = self.get_template_param_values()
+        if self.calc_tmem() == 0:
+            odtype_hls_str = self.get_output_datatype().get_hls_datatype_str()
+            threshs = "PassThroughActivation<%s>()" % odtype_hls_str
+        else:
+            threshs = "threshs"
+        node = self.onnx_node
+        self.code_gen_dict["$DOCOMPUTE$"] = [
+            """{}<Channels1, Kernel1, SIMD1, PE1, 1, {}, {}, {}>
+            (in0, out, weights, {}, numReps, {});""".format(
+                node.op_type,
+                tmpl_args["TSrcI"],
+                tmpl_args["TDstI"],
+                tmpl_args["TWeightI"],
+                threshs,
+                self.get_nodeattr("resType"),
+            )
+        ]
+
+    def dataoutstrm(self):
+        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
+        dtype = self.get_output_datatype()
+        elem_bits = dtype.bitwidth()
+        packed_bits = self.get_outstream_width()
+        packed_hls_type = "ap_uint<%d>" % packed_bits
+        elem_hls_type = dtype.get_hls_datatype_str()
+        npy_type = "float"
+        npy_out = "%s/output.npy" % code_gen_dir
+        shape = self.get_folded_output_shape()
+        shape_cpp_str = str(shape).replace("(", "{").replace(")", "}")
+
+        # note: the innermost dim is not reversed for the output
+        self.code_gen_dict["$DATAOUTSTREAM$"] = [
+            'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s", false);'
+            % (
+                packed_hls_type,
+                elem_hls_type,
+                elem_bits,
+                npy_type,
+                shape_cpp_str,
+                npy_out,
+            )
+        ]
+
+    def save_as_npy(self):
+        self.code_gen_dict["$SAVEASCNPY$"] = []
+
+    def blackboxfunction(self):
+        self.code_gen_dict["$BLACKBOXFUNCTION$"] = [
+            """void {}(hls::stream<ap_uint<{}>> &in0,
+            hls::stream<ap_uint<{}>> &out
+            )""".format(
+                self.onnx_node.name,
+                self.get_instream_width(),
+                self.get_outstream_width(),
+            )
+        ]
+
+    def pragmas(self):
+        self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"]
+        self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out")
+        in_fifo_depth = self.get_nodeattr("inFIFODepth")
+        out_fifo_depth = self.get_nodeattr("outFIFODepth")
+        # insert depth pragmas only if specified
+        if in_fifo_depth != 0:
+            self.code_gen_dict["$PRAGMAS$"].append(
+                "#pragma HLS stream depth=%d variable=in0" % in_fifo_depth
+            )
+        if out_fifo_depth != 0:
+            self.code_gen_dict["$PRAGMAS$"].append(
+                "#pragma HLS stream depth=%d variable=out" % out_fifo_depth
+            )
+        self.code_gen_dict["$PRAGMAS$"].append(
+            "#pragma HLS INTERFACE ap_ctrl_none port=return"
+        )
+
+        self.code_gen_dict["$PRAGMAS$"].append('#include "params.h"')
+        # the weight tensor is ap_uint<ch*prec> [PE][WMEM]
+        # partition for parallel access along the PE dimension (dim 1)
+        self.code_gen_dict["$PRAGMAS$"].append(
+            ("#pragma HLS ARRAY_PARTITION variable=weights.m_weights " "complete dim=1")
+        )
+        if self.calc_tmem() != 0:
+            # TODO find a better way of checking for no pregenerated thresholds
+            self.code_gen_dict["$PRAGMAS$"].append(
+                (
+                    "#pragma HLS ARRAY_PARTITION variable=threshs.m_thresholds "
+                    "complete dim=1"
+                )
+            )
+            self.code_gen_dict["$PRAGMAS$"].append(
+                (
+                    "#pragma HLS ARRAY_PARTITION variable=threshs.m_thresholds "
+                    "complete dim=3"
+                )
+            )
diff --git a/src/finn/custom_op/im2col.py b/src/finn/custom_op/im2col.py
index 16446c15d46ee7996162f864708f7fde6cfedaf3..8ed0041704d421dab587f08bcbcd9e739e8434e9 100644
--- a/src/finn/custom_op/im2col.py
+++ b/src/finn/custom_op/im2col.py
@@ -21,8 +21,6 @@ def get_im2col_indices_nchw(
     """Returns im2col indices."""
     # First figure out what the size of the output should be
     N, C, H, W = x_shape
-    assert (H + 2 * padding - field_height) % stride_y == 0
-    assert (W + 2 * padding - field_width) % stride_x == 0
     out_height = compute_conv_output_dim(H, field_height, stride_y, padding)
     out_width = compute_conv_output_dim(W, field_width, stride_x, padding)
 
@@ -70,6 +68,9 @@ def im2col_indices_nchw(
 # * ifm is the number of input channels
 # * k is the convolutional kernel size
 
+# note: for the innermost (dot product) dimension of k*k*ifm, we
+# assume an internal ordering (k, k, ifm)
+
 
 class Im2Col(CustomOp):
     def get_nodeattr_types(self):
@@ -79,6 +80,8 @@ class Im2Col(CustomOp):
             "input_shape": ("s", True, ""),
             "pad_amount": ("i", False, 0),
             "pad_value": ("i", False, 0),
+            # depthwise: if != 0, infer ConvolutionInputGenerator with depthwise == 1
+            "depthwise": ("i", False, 0),
         }
 
     def make_shape_compatible_op(self, model):
diff --git a/src/finn/custom_op/multithreshold.py b/src/finn/custom_op/multithreshold.py
index 37f8e0950b5fc352c8f9fe005884724f028879a0..bc0a454cdf847d124b12c940b029f51bf2d3e778 100644
--- a/src/finn/custom_op/multithreshold.py
+++ b/src/finn/custom_op/multithreshold.py
@@ -33,16 +33,6 @@ from finn.core.datatype import DataType
 from finn.custom_op import CustomOp
 
 
-def compare(x, y):
-    """Comparison helper function for multithresholding.
-
-    Gets two values and returns 1.0 if x>=y otherwise 0.0."""
-    if x >= y:
-        return 1.0
-    else:
-        return 0.0
-
-
 def multithreshold(v, thresholds, out_scale=None, out_bias=None):
     """Given a set of threshold values t={t_0, t_1 ... t_n} the successive
     thresholding maps any real number x to an integer in the interval [0, n],
@@ -76,8 +66,6 @@ def multithreshold(v, thresholds, out_scale=None, out_bias=None):
     num_act = thresholds.shape[1]
     # reshape inputs to enable channel-wise reading
     vr = v.reshape((v.shape[0], v.shape[1], -1))
-    # save the new shape size of the images
-    num_img_elem = vr.shape[2]
     # initiate output tensor
     ret = np.zeros_like(vr)
     # iterate over thresholds channel-wise
@@ -85,12 +73,10 @@ def multithreshold(v, thresholds, out_scale=None, out_bias=None):
         channel_thresh = thresholds[0] if is_global_threshold else thresholds[t]
         # iterate over batches
         for b in range(num_batch):
-            # iterate over image elements on which the thresholds will be applied
-            for elem in range(num_img_elem):
-                # iterate over the different thresholds for one channel
-                for a in range(num_act):
-                    # apply successive thresholding to every element
-                    ret[b][t][elem] += compare(vr[b][t][elem], channel_thresh[a])
+            # iterate over the different thresholds for one channel
+            for a in range(num_act):
+                ret[b][t] += (vr[b][t] >= channel_thresh[a]).astype(int)
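+                # e.g. channel thresholds [-1, 0, 1] map an element of value 0.5
+                # to 2, since two of the thresholds are crossed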
+
     if out_scale is None:
         out_scale = 1.0
     if out_bias is None:
diff --git a/src/finn/custom_op/quantavgpool2d.py b/src/finn/custom_op/quantavgpool2d.py
new file mode 100644
index 0000000000000000000000000000000000000000..28d01069264d883f3afc400808470f5f303be799
--- /dev/null
+++ b/src/finn/custom_op/quantavgpool2d.py
@@ -0,0 +1,136 @@
+import numpy as np
+from onnx import TensorProto, helper
+import onnxruntime as rt
+
+from finn.custom_op import CustomOp
+from finn.core.datatype import DataType
+from finn.custom_op.maxpoolnhwc import compute_pool_output_dim
+
+
+class QuantAvgPool2d(CustomOp):
+    """Class that corresponds to the quantized average pooling
+    layer from brevitas"""
+
+    def get_nodeattr_types(self):
+        return {
+            "stride": ("i", True, 1),
+            "kernel": ("i", True, 1),
+            "ibits": ("i", True, 1),
+            "obits": ("i", True, 1),
+            # determines if values are signed (set to "1") or unsigned ("0")
+            "signed": ("i", True, 0),
+            # data layout attribute can be set to "NCHW" or "NHWC"
+            "data_layout": ("s", False, "NCHW"),
+        }
+
+    def make_shape_compatible_op(self, model):
+        node = self.onnx_node
+        k = self.get_nodeattr("kernel")
+        s = self.get_nodeattr("stride")
+        data_layout = self.get_nodeattr("data_layout")
+        if data_layout == "NCHW":
+            return helper.make_node(
+                "AveragePool",
+                inputs=[node.input[0]],
+                outputs=[node.output[0]],
+                kernel_shape=[k, k],
+                strides=[s, s],
+            )
+        elif data_layout == "NHWC":
+            iname = node.input[0]
+            ishape = model.get_tensor_shape(iname)
+            (n, hi, wi, c) = ishape
+            ho = compute_pool_output_dim(hi, k, s)
+            wo = compute_pool_output_dim(wi, k, s)
+            oshape = (n, ho, wo, c)
+            # implement tensor with correct shape
+            values = np.random.randn(*oshape).astype(np.float32)
+            return helper.make_node(
+                "Constant",
+                inputs=[],
+                outputs=[node.output[0]],
+                value=helper.make_tensor(
+                    name="const_tensor",
+                    data_type=TensorProto.FLOAT,
+                    dims=values.shape,
+                    vals=values.flatten().astype(float),
+                ),
+            )
+
+        else:
+            raise Exception(
+                """Datalayout for QuantAvgPool2d is set to an invalid value.
+                    Has to be set to "NCHW" or "NHWC"."""
+            )
+
+    def infer_node_datatype(self, model):
+        node = self.onnx_node
+        bw = self.get_nodeattr("obits")
+        if bw in [2, 4, 8, 16, 32]:
+            if self.get_nodeattr("signed") == 0:
+                dtype = DataType["UINT%d" % bw]
+            else:
+                dtype = DataType["INT%d" % bw]
+        else:
+            raise Exception("Unsupported output datatype for QuantAvgPool2d")
+        model.set_tensor_datatype(node.output[0], dtype)
+
+    def get_accum_size(self):
+        ibits = self.get_nodeattr("ibits")
+        k = self.get_nodeattr("kernel")
+        max_value = 2 ** ibits - 1
+        max_value = max_value * k * k
+        max_bit_width = int(max_value).bit_length()
+        return max_bit_width
+
+    def get_shifts(self):
+        shift_bits = self.get_accum_size() - self.get_nodeattr("obits")
+        shift_bits = shift_bits if shift_bits >= 0 else 0
+        return shift_bits
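+
+    # worked example (illustrative): for ibits=4 and kernel=2 the largest
+    # accumulator value is (2**4 - 1) * 2 * 2 = 60, which needs 6 bits;
+    # with obits=4 the result is right-shifted by 6 - 4 = 2 bits.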
+
+    def execute_node(self, context, graph):
+        # create a standard average pooling node to help calculate the result
+        node = self.onnx_node
+        k = self.get_nodeattr("kernel")
+        s = self.get_nodeattr("stride")
+        inp_values = context[node.input[0]]
+        oshape = context[node.output[0]].shape
+        if self.get_nodeattr("data_layout") == "NHWC":
+            inp_values = inp_values.transpose(0, 3, 1, 2)
+            oshape = (context[node.output[0]]).transpose(0, 3, 1, 2).shape
+        ishape = inp_values.shape
+        inp = helper.make_tensor_value_info(node.input[0], TensorProto.FLOAT, ishape)
+        outp = helper.make_tensor_value_info(node.output[0], TensorProto.FLOAT, oshape)
+        node_avgpool = helper.make_node(
+            "AveragePool",
+            inputs=[node.input[0]],
+            outputs=[node.output[0]],
+            kernel_shape=[k, k],
+            strides=[s, s],
+        )
+        graph_avgpool = helper.make_graph(
+            nodes=[node_avgpool],
+            name="single-avgpool-exec",
+            inputs=[inp],
+            outputs=[outp],
+        )
+        model_avgpool = helper.make_model(graph_avgpool)
+        idict = {node.input[0]: inp_values}
+        sess = rt.InferenceSession(model_avgpool.SerializeToString())
+        result_temp = sess.run(None, idict)
+        # remove scaling introduced by average
+        result_temp = result_temp[0] * (k * k)
+        result = np.right_shift(result_temp.astype(int), self.get_shifts())
+        if self.get_nodeattr("data_layout") == "NHWC":
+            result = result.transpose(0, 2, 3, 1)
+        context[node.output[0]] = result.astype(np.float32)
+
+    def verify_node(self):
+        info_messages = []
+        # verify that "domain" is set to "finn"
+        domain_value = self.onnx_node.domain
+        if domain_value == "finn":
+            info_messages.append("Attribute domain is set correctly")
+        else:
+            info_messages.append('Attribute domain should be set to "finn"')
+        return info_messages
diff --git a/src/finn/custom_op/registry.py b/src/finn/custom_op/registry.py
index 411311c2b9def953ee5ac6d03adfafb81704c177..ecf2a711f17ac35c9bf8cb081fb4dc6d9bb6c01e 100644
--- a/src/finn/custom_op/registry.py
+++ b/src/finn/custom_op/registry.py
@@ -31,6 +31,7 @@
 from finn.custom_op.fpgadataflow.convolutioninputgenerator import (
     ConvolutionInputGenerator,
 )
+from finn.custom_op.fpgadataflow.downsampler import DownSampler
 from finn.custom_op.fpgadataflow.streamingfclayer_batch import StreamingFCLayer_Batch
 from finn.custom_op.fpgadataflow.streamingmaxpool_batch import StreamingMaxPool_Batch
 from finn.custom_op.fpgadataflow.streamingfifo import StreamingFIFO
@@ -43,11 +44,26 @@ from finn.custom_op.maxpoolnhwc import MaxPoolNHWC
 from finn.custom_op.fpgadataflow.streamingdatawidthconverter_batch import (
     StreamingDataWidthConverter_Batch,
 )
+from finn.custom_op.fpgadataflow.globalaccpool_batch import GlobalAccPool_Batch
+from finn.custom_op.fpgadataflow.pool_batch import Pool_Batch
+from finn.custom_op.fpgadataflow.fmpadding_batch import FMPadding_Batch
+from finn.custom_op.fpgadataflow.thresholding_batch import Thresholding_Batch
+from finn.custom_op.fpgadataflow.addstreams_batch import AddStreams_Batch
+from finn.custom_op.fpgadataflow.labelselect_batch import LabelSelect_Batch
+from finn.custom_op.quantavgpool2d import QuantAvgPool2d
+from finn.custom_op.fpgadataflow.duplicatestreams_batch import DuplicateStreams_Batch
+from finn.custom_op.fpgadataflow.vector_vector_activate_batch import (
+    Vector_Vector_Activate_Batch,
+)
+from finn.custom_op.fpgadataflow.channelwise_op_batch import ChannelwiseOp_Batch
+from finn.custom_op.fpgadataflow.iodma import IODMA
+from finn.custom_op.debugmarker import DebugMarker
 
 # create a mapping of all known CustomOp names and classes
 custom_op = {}
 
 custom_op["MultiThreshold"] = MultiThreshold
+custom_op["DownSampler"] = DownSampler
 custom_op["XnorPopcountMatMul"] = XnorPopcountMatMul
 custom_op["Im2Col"] = Im2Col
 custom_op["StreamingMaxPool_Batch"] = StreamingMaxPool_Batch
@@ -58,6 +74,18 @@ custom_op["StreamingDataflowPartition"] = StreamingDataflowPartition
 custom_op["MaxPoolNHWC"] = MaxPoolNHWC
 custom_op["StreamingDataWidthConverter_Batch"] = StreamingDataWidthConverter_Batch
 custom_op["StreamingFIFO"] = StreamingFIFO
+custom_op["GlobalAccPool_Batch"] = GlobalAccPool_Batch
+custom_op["Pool_Batch"] = Pool_Batch
+custom_op["FMPadding_Batch"] = FMPadding_Batch
+custom_op["Thresholding_Batch"] = Thresholding_Batch
+custom_op["AddStreams_Batch"] = AddStreams_Batch
+custom_op["LabelSelect_Batch"] = LabelSelect_Batch
+custom_op["QuantAvgPool2d"] = QuantAvgPool2d
+custom_op["DuplicateStreams_Batch"] = DuplicateStreams_Batch
+custom_op["Vector_Vector_Activate_Batch"] = Vector_Vector_Activate_Batch
+custom_op["ChannelwiseOp_Batch"] = ChannelwiseOp_Batch
+custom_op["IODMA"] = IODMA
+custom_op["DebugMarker"] = DebugMarker
 
 
 def getCustomOp(node):
diff --git a/src/finn/custom_op/streamingdataflowpartition.py b/src/finn/custom_op/streamingdataflowpartition.py
index b63326d676f4ded5ec1dd62f5cc7f02d7acb82ad..31cd38fea3c5a9e88084c3332d46aebdb065f800 100644
--- a/src/finn/custom_op/streamingdataflowpartition.py
+++ b/src/finn/custom_op/streamingdataflowpartition.py
@@ -36,7 +36,12 @@ class StreamingDataflowPartition(CustomOp):
     bitfile by itself."""
 
     def get_nodeattr_types(self):
-        return {"model": ("s", True, "")}
+        return {
+            "model": ("s", True, ""),
+            "res_estimate": ("s", False, ""),
+            "res_hls": ("s", False, ""),
+            "res_synth": ("s", False, ""),
+        }
 
     def make_shape_compatible_op(self, model):
         pass
@@ -83,7 +88,7 @@ class StreamingDataflowPartition(CustomOp):
             )
 
         # verify the number of inputs
-        if len(self.onnx_node.input) == 1:
+        if len(self.onnx_node.input) >= 1:
             info_messages.append("The number of inputs is correct")
         else:
             info_messages.append("StreamingDataflowPartition needs 1 data input")
diff --git a/src/finn/transformation/bipolar_to_xnor.py b/src/finn/transformation/bipolar_to_xnor.py
index 4c7ebaf04e35f94e84e52e0b4520ee2369502120..80f2a73351f8548c99efd8dedd8a04d44c8558a3 100644
--- a/src/finn/transformation/bipolar_to_xnor.py
+++ b/src/finn/transformation/bipolar_to_xnor.py
@@ -27,6 +27,7 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import numpy as np
+import warnings
 from onnx import TensorProto
 from onnx import helper as oh
 
@@ -35,6 +36,7 @@ from finn.transformation import Transformation
 from finn.transformation.infer_shapes import InferShapes
 from finn.transformation.infer_datatypes import InferDataTypes
 from finn.util.basic import get_by_name
+from finn.custom_op.registry import getCustomOp
 
 
 class ConvertBipolarMatMulToXnorPopcount(Transformation):
@@ -65,17 +67,40 @@ class ConvertBipolarMatMulToXnorPopcount(Transformation):
 
                     mt_chain = model.find_upstream(mm_input, find_prod_mt)
                     if len(mt_chain) == 0:
-                        raise Exception(
-                            """Could not find upstream bipolar
-                                            MultiThreshold"""
-                        )
-                    graph_modified = True
-                    mt = mt_chain[-1]
-                    bin_dt_attr = "BINARY".encode("utf-8")
-                    get_by_name(mt.attribute, "out_dtype").s = bin_dt_attr
-                    get_by_name(mt.attribute, "out_scale").f = 1.0
-                    get_by_name(mt.attribute, "out_bias").f = 0
-                    model.set_tensor_datatype(mm_input, DataType.BINARY)
+                        if mm_input == graph.input[0].name:
+                            # change input datatype to BINARY
+                            model.set_tensor_datatype(mm_input, DataType.BINARY)
+                            graph_modified = True
+                            warnings.warn(
+                                """IMPORTANT: Changing graph input DataType
+                            to BINARY instead of BIPOLAR. Ensure this is respected
+                            when checking for correctness.
+                            """
+                            )
+                        else:
+                            raise Exception(
+                                """Could not find upstream bipolar
+                                   MultiThreshold, and the MatMul is not the
+                                   first node on graph input. Unable to convert
+                                   input tensor from BIPOLAR to BINARY."""
+                            )
+                    else:
+                        graph_modified = True
+                        mt = mt_chain[-1]
+                        mt_inst = getCustomOp(mt)
+                        # ensure old scale/bias were correct for BIPOLAR
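+                        # (with out_scale=2.0/out_bias=-1.0 the MT output is
+                        # 2*count - 1, i.e. BIPOLAR {-1, +1}; dropping them leaves
+                        # the raw count, i.e. the BINARY {0, 1} encoding)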
+                        scale_ok = mt_inst.get_nodeattr("out_scale") == 2.0
+                        bias_ok = mt_inst.get_nodeattr("out_bias") == -1.0
+                        assert (
+                            scale_ok and bias_ok
+                        ), """Unexpected scale/bias
+                        attributes for BIPOLAR MultiThreshold node."""
+                        # start conversion, set MT output to binary
+                        # (this is what XnorPopcountMatMul expects)
+                        mt_inst.set_nodeattr("out_dtype", "BINARY")
+                        mt_inst.set_nodeattr("out_scale", 1.0)
+                        mt_inst.set_nodeattr("out_bias", 0.0)
+                        model.set_tensor_datatype(mm_input, DataType.BINARY)
                     # change node type and domain
                     n.op_type = "XnorPopcountMatMul"
                     n.domain = "finn"
diff --git a/src/finn/transformation/change_datalayout.py b/src/finn/transformation/change_datalayout.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5b393a25e57122b059a44f70904a6dbe5bbaa3f
--- /dev/null
+++ b/src/finn/transformation/change_datalayout.py
@@ -0,0 +1,110 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from onnx import helper, TensorProto
+
+from finn.transformation import Transformation
+from finn.transformation.infer_shapes import InferShapes
+from finn.util.basic import get_by_name
+
+
+class ChangeDataLayoutQuantAvgPool2d(Transformation):
+    """Replace QuantAvgPool2d with datalayout (N,C,H,W) with Transpose nodes
+    and QuantAvgPool2dNHWC with datalayout (N,H,W,C)"""
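+    # resulting node pattern (for an NCHW input):
+    #   Transpose(perm=[0,2,3,1]) -> QuantAvgPool2d(data_layout="NHWC")
+    #   -> Transpose(perm=[0,3,1,2])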
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            node_ind += 1
+            if n.op_type == "QuantAvgPool2d" and (
+                get_by_name(n.attribute, "data_layout") is None
+                or get_by_name(n.attribute, "data_layout").s.decode("UTF-8") == "NCHW"
+            ):
+                graph_modified = True
+                node_input = n.input[0]
+                node_output = n.output[0]
+                s = get_by_name(n.attribute, "stride").i
+                k = get_by_name(n.attribute, "kernel").i
+                ibits = get_by_name(n.attribute, "ibits").i
+                obits = get_by_name(n.attribute, "obits").i
+                signed = get_by_name(n.attribute, "signed").i
+                batchsize = model.get_tensor_shape(n.input[0])[0]  # assume NCHW
+                channels = model.get_tensor_shape(n.input[0])[1]  # assume NCHW
+                idim = model.get_tensor_shape(n.input[0])[-1]  # assume NCHW
+                odim = model.get_tensor_shape(n.output[0])[-1]  # assume NCHW
+
+                # create new nodes
+                # NCHW -> NHWC
+                # create new intermediate values
+                inp_trans_out = helper.make_tensor_value_info(
+                    model.make_new_valueinfo_name(),
+                    TensorProto.FLOAT,
+                    (batchsize, idim, idim, channels),  # NHWC
+                )
+                graph.value_info.append(inp_trans_out)
+                inp_trans_out = inp_trans_out.name
+                quantavg_out = helper.make_tensor_value_info(
+                    model.make_new_valueinfo_name(),
+                    TensorProto.FLOAT,
+                    (batchsize, odim, odim, channels),
+                )
+                graph.value_info.append(quantavg_out)
+                quantavg_out = quantavg_out.name
+                inp_trans_node = helper.make_node(
+                    "Transpose", [node_input], [inp_trans_out], perm=[0, 2, 3, 1]
+                )
+                quantavg_node = helper.make_node(
+                    "QuantAvgPool2d",
+                    [inp_trans_out],
+                    [quantavg_out],
+                    domain="finn",
+                    stride=s,
+                    kernel=k,
+                    ibits=ibits,
+                    obits=obits,
+                    signed=signed,
+                    data_layout="NHWC",
+                )
+                # NHWC -> NCHW
+                out_trans_node = helper.make_node(
+                    "Transpose", [quantavg_out], [node_output], perm=[0, 3, 1, 2]
+                )
+                # insert nodes
+                graph.node.insert(node_ind, inp_trans_node)
+                graph.node.insert(node_ind + 1, quantavg_node)
+                graph.node.insert(node_ind + 2, out_trans_node)
+                # remove old nodes
+                graph.node.remove(n)
+
+                # set shapes
+                model.set_tensor_shape(inp_trans_out, (batchsize, idim, idim, channels))
+                model.set_tensor_shape(quantavg_out, (batchsize, odim, odim, channels))
+        model = model.transform(InferShapes())
+        return (model, graph_modified)
diff --git a/src/finn/transformation/fpgadataflow/annotate_cycles.py b/src/finn/transformation/fpgadataflow/annotate_cycles.py
new file mode 100644
index 0000000000000000000000000000000000000000..521c84952daf25982e574421dfba3ff0f7df91ae
--- /dev/null
+++ b/src/finn/transformation/fpgadataflow/annotate_cycles.py
@@ -0,0 +1,59 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import finn.custom_op.registry as registry
+from finn.transformation import Transformation
+from finn.transformation.move_reshape import _is_fpgadataflow_node
+from finn.core.modelwrapper import ModelWrapper
+from finn.custom_op.registry import getCustomOp
+
+
+class AnnotateCycles(Transformation):
+    """Annotate the estimate of clock cycles per sample taken by each fpgadataflow
+    node as an attribute on the node.
+    """
+
+    def __init__(self):
+        super().__init__()
+
+    def apply(self, model):
+        graph = model.graph
+        # annotate node cycles
+        for node in graph.node:
+            if _is_fpgadataflow_node(node):
+                op_inst = registry.getCustomOp(node)
+                cycles = op_inst.get_exp_cycles()
+                op_inst.set_nodeattr("cycles_estimate", cycles)
+            elif node.op_type == "StreamingDataflowPartition":
+                # recurse into model to manually annotate per-layer cycles
+                sdp_model_filename = getCustomOp(node).get_nodeattr("model")
+                sdp_model = ModelWrapper(sdp_model_filename)
+                sdp_model = sdp_model.transform(AnnotateCycles())
+                # save transformed model
+                sdp_model.save(sdp_model_filename)
+        return (model, False)
diff --git a/src/finn/transformation/fpgadataflow/annotate_resources.py b/src/finn/transformation/fpgadataflow/annotate_resources.py
index 207075b00de1871da19ea78472125d435449ed6e..d6ff058848700b50dadb7a6ed0ff6c07b7eeb4a3 100644
--- a/src/finn/transformation/fpgadataflow/annotate_resources.py
+++ b/src/finn/transformation/fpgadataflow/annotate_resources.py
@@ -32,6 +32,8 @@ from finn.transformation.move_reshape import _is_fpgadataflow_node
 from finn.analysis.fpgadataflow.res_estimation import res_estimation
 from finn.analysis.fpgadataflow.hls_synth_res_estimation import hls_synth_res_estimation
 from finn.analysis.fpgadataflow.post_synth_res import post_synth_res
+from finn.core.modelwrapper import ModelWrapper
+from finn.custom_op.registry import getCustomOp
 
 
 class AnnotateResources(Transformation):
@@ -39,14 +41,16 @@ class AnnotateResources(Transformation):
     node as an attribute on the node, depending on the mode parameter:
     * 'estimate' -- use the analytical estimation model
     * 'hls' -- use results from the HLS synthesis report
+    * 'synth' -- use post-synthesis (Vivado or Vitis) report
 
     No annotations can be provided unless the relevant transformation for the
     chosen mode (e.g. HLSSynthIP for hls) was previously run.
     """
 
-    def __init__(self, mode):
+    def __init__(self, mode, override_res_dict=None):
         super().__init__()
         self.mode = mode
+        self.res_dict = override_res_dict
 
     def apply(self, model):
         graph = model.graph
@@ -58,10 +62,33 @@ class AnnotateResources(Transformation):
             res_fxn = post_synth_res
         else:
             raise Exception("Unrecognized mode for AnnotateResources")
-        res_dict = model.analysis(res_fxn)
+        if self.res_dict is None:
+            self.res_dict = model.analysis(res_fxn)
+        children_dict = {}
+        # annotate node resources
+        for node in graph.node:
+            if _is_fpgadataflow_node(node) and node.name in self.res_dict.keys():
+                op_inst = registry.getCustomOp(node)
+                op_inst.set_nodeattr("res_" + self.mode, str(self.res_dict[node.name]))
+                children_dict[node.name] = self.res_dict[node.name]
+            elif node.op_type == "StreamingDataflowPartition":
+                # recurse into model to manually annotate per-layer resources
+                sdp_model_filename = getCustomOp(node).get_nodeattr("model")
+                sdp_model = ModelWrapper(sdp_model_filename)
+                sdp_model = sdp_model.transform(
+                    AnnotateResources(self.mode, self.res_dict)
+                )
+                sdp_dict = sdp_model.get_metadata_prop("res_total_" + self.mode)
+                sdp_dict = eval(sdp_dict)
+                # save transformed model
+                sdp_model.save(sdp_model_filename)
+                # set res attribute for sdp node
+                getCustomOp(node).set_nodeattr("res_" + self.mode, str(sdp_dict))
+                children_dict[node.name] = sdp_dict
+        self.res_dict.update(children_dict)
         total_dict = {}
-        for lname in res_dict.keys():
-            layer_res_dict = res_dict[lname]
+        for lname in children_dict.keys():
+            layer_res_dict = self.res_dict[lname]
             for r_type in layer_res_dict.keys():
                 r_amount = layer_res_dict[r_type]
                 r_amount = float(r_amount)
@@ -69,10 +96,11 @@ class AnnotateResources(Transformation):
                     total_dict[r_type] += r_amount
                 else:
                     total_dict[r_type] = r_amount
+        for k in total_dict.keys():
+            if "efficiency" in k:
+                total_dict[k] = total_dict[k] / len(graph.node)
         model.set_metadata_prop("res_total_" + self.mode, str(total_dict))
-        for node in graph.node:
-            if _is_fpgadataflow_node(node) and node.name in res_dict.keys():
-                op_inst = registry.getCustomOp(node)
-                op_inst.set_nodeattr("res_" + self.mode, str(res_dict[node.name]))
-
+        if "(top)" in self.res_dict.keys():
+            top_dict = self.res_dict["(top)"]
+            model.set_metadata_prop("res_total_top_" + self.mode, str(top_dict))
         return (model, False)
diff --git a/src/finn/transformation/fpgadataflow/convert_to_hls_layers.py b/src/finn/transformation/fpgadataflow/convert_to_hls_layers.py
index dbd98623c4cdf5baca9fa9c137debf8be0f70981..d4d5b006493b8db1da0184e98ba35493d3e6ccbd 100644
--- a/src/finn/transformation/fpgadataflow/convert_to_hls_layers.py
+++ b/src/finn/transformation/fpgadataflow/convert_to_hls_layers.py
@@ -26,13 +26,23 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-from onnx import helper
+
+from onnx import helper, TensorProto
+import numpy as np
+import warnings
 
 from finn.core.datatype import DataType
 from finn.transformation import Transformation
 from finn.custom_op.registry import getCustomOp
 from finn.transformation.infer_shapes import InferShapes
 from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.general import SortGraph
+import finn.core.data_layout as DataLayout
+from finn.util.onnx import nchw_to_nhwc
+from finn.util.basic import get_by_name
+from finn.transformation.fpgadataflow.minimize_accumulator_width import (
+    MinimizeAccumulatorWidth,
+)
 
 
 class InferConvInpGen(Transformation):
@@ -50,35 +60,95 @@ class InferConvInpGen(Transformation):
                 i2c_in_shape = model.get_tensor_shape(i2c_input)
                 i2c_out_shape = model.get_tensor_shape(i2c_output)
                 dt = model.get_tensor_datatype(i2c_input)
+                if not dt.is_integer():
+                    warnings.warn("Input is not int. Can't infer ConvInpGen")
+                    continue
                 i2c_inst = getCustomOp(n)
                 stride = i2c_inst.get_nodeattr("stride")
                 k = i2c_inst.get_nodeattr("kernel_size")
                 pad = i2c_inst.get_nodeattr("pad_amount")
                 pad_val = i2c_inst.get_nodeattr("pad_value")
+                depthwise = i2c_inst.get_nodeattr("depthwise")
                 ifm_ch = i2c_in_shape[-1]
                 ifm_dim = i2c_in_shape[1]
                 ofm_dim = i2c_out_shape[1]
-                # if padding enabled, ensure pad_val supported by DataType
+
+                # default params for ConvolutionInputGenerator
+                ConvInpGen_node_idx = node_ind
+                ConvInpGen_input = i2c_input
+                ConvInpGen_idim = ifm_dim
+
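+                # lowering summary: [FMPadding_Batch (if pad > 0)] ->
+                # (DownSampler if stride > 1 and k == 1, else ConvolutionInputGenerator)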
                 if pad > 0:
-                    assert dt.allowed(pad_val), "Im2Col DataType must support pad_val"
-                # create equivalent ConvolutionInputGenerator node
-                # TODO support padding
-                new_node = helper.make_node(
-                    "ConvolutionInputGenerator",
-                    [i2c_input],
-                    [i2c_output],
-                    domain="finn",
-                    backend="fpgadataflow",
-                    ConvKernelDim=k,
-                    IFMChannels=ifm_ch,
-                    IFMDim=ifm_dim,
-                    OFMDim=ofm_dim,
-                    SIMD=ifm_ch,
-                    Stride=stride,
-                    inputDataType=dt.name,
-                    outputDataType=dt.name,
-                )
-                graph.node.insert(node_ind, new_node)
+                    # if padding enabled, ensure pad_val supported by DataType
+                    # assert dt.allowed(pad_val),"""FMPadding_Batch DataType
+                    # must support pad_val"""
+                    assert (
+                        pad_val == 0
+                    ), "FMPadding_Batch doesn't currently support pad_val!= 0"
+
+                    odim_padding = ifm_dim + 2 * pad
+
+                    padding_out = helper.make_tensor_value_info(
+                        model.make_new_valueinfo_name(),
+                        TensorProto.FLOAT,
+                        (1, odim_padding, odim_padding, ifm_ch),
+                    )
+                    graph.value_info.append(padding_out)
+                    padding_out = padding_out.name
+                    model.set_tensor_datatype(padding_out, dt)
+
+                    ConvInpGen_node_idx += 1
+                    ConvInpGen_input = padding_out
+                    ConvInpGen_idim = odim_padding
+
+                    padding_node = helper.make_node(
+                        "FMPadding_Batch",
+                        [i2c_input],
+                        [padding_out],
+                        domain="finn",
+                        backend="fpgadataflow",
+                        ImgDim=ifm_dim,
+                        Padding=2 * pad,
+                        NumChannels=ifm_ch,
+                        inputDataType=dt.name,
+                        SIMD=ifm_ch,
+                    )
+                    graph.node.insert(node_ind, padding_node)
+
+                if stride > 1 and k == 1:
+                    # create DownSampler node
+                    ConvInpGen_node = helper.make_node(
+                        "DownSampler",
+                        [ConvInpGen_input],
+                        [i2c_output],
+                        domain="finn",
+                        backend="fpgadataflow",
+                        ImgDim=ConvInpGen_idim,
+                        NumChannels=ifm_ch,
+                        SIMD=ifm_ch,
+                        Stride=stride,
+                        inputDataType=dt.name,
+                    )
+                    graph.node.insert(ConvInpGen_node_idx, ConvInpGen_node)
+                else:
+                    # create equivalent ConvolutionInputGenerator node
+                    ConvInpGen_node = helper.make_node(
+                        "ConvolutionInputGenerator",
+                        [ConvInpGen_input],
+                        [i2c_output],
+                        domain="finn",
+                        backend="fpgadataflow",
+                        ConvKernelDim=k,
+                        IFMChannels=ifm_ch,
+                        IFMDim=ConvInpGen_idim,
+                        OFMDim=ofm_dim,
+                        SIMD=ifm_ch,
+                        Stride=stride,
+                        inputDataType=dt.name,
+                        outputDataType=dt.name,
+                        depthwise=depthwise,
+                    )
+                    graph.node.insert(ConvInpGen_node_idx, ConvInpGen_node)
                 # remove old nodes
                 graph.node.remove(n)
                 graph_modified = True
@@ -134,6 +204,167 @@ class InferStreamingMaxPool(Transformation):
         return (model, graph_modified)
 
 
+class InferPool_Batch(Transformation):
+    """If kernel_shape > strides, replace Pool layer with  with of Im2col
+    + pool(with kernel_shape == strides), plus Transpose layers to keep the original
+    data layout."""
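+    # resulting node pattern (for an NCHW input):
+    #   Transpose(perm=[0,2,3,1]) -> Im2Col(depthwise=1) -> Pool_Batch
+    #   -> Transpose(perm=[0,3,1,2])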
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            node_ind += 1
+            if n.op_type in ["MaxPool", "QuantAvgPool2d"]:
+                # extract pool parameters
+
+                if n.op_type == "MaxPool":
+                    k = get_by_name(n.attribute, "kernel_shape").ints[-1]
+                    stride = get_by_name(n.attribute, "strides").ints[-1]
+                elif n.op_type == "QuantAvgPool2d":
+                    inst = getCustomOp(n)
+                    k = inst.get_nodeattr("kernel")
+                    stride = inst.get_nodeattr("stride")
+
+                try:
+                    pad = get_by_name(n.attribute, "pads").ints[-1]
+                except AttributeError:
+                    pad = 0
+
+                node_input = n.input[0]
+                node_output = n.output[0]
+                idt = model.get_tensor_datatype(node_input)
+
+                if not idt.is_integer():
+                    continue
+
+                if k < stride:
+                    continue
+                elif k == stride:
+                    warnings.warn(
+                        """Inferring Pool_Batch node for k == stride.
+                        This case can be optimized.
+                        For example, for MaxPool run InferStreamingMaxPool before
+                        InferPool_Batch """
+                    )
+
+                odt = model.get_tensor_datatype(node_output)
+
+                ifm_ch = model.get_tensor_shape(n.input[0])[1]  # assume NCHW
+                ofm_ch = ifm_ch
+                ifm_dim = model.get_tensor_shape(n.input[0])[-1]  # assume NCHW
+                ofm_dim = model.get_tensor_shape(n.output[0])[-1]  # assume NCHW
+                # create new intermediate values
+                inp_trans_out = helper.make_tensor_value_info(
+                    model.make_new_valueinfo_name(),
+                    TensorProto.FLOAT,
+                    (1, ifm_dim, ifm_dim, ifm_ch),  # NHWC
+                )
+                graph.value_info.append(inp_trans_out)
+                inp_trans_out = inp_trans_out.name
+                model.set_tensor_datatype(inp_trans_out, idt)
+
+                im2col_out = helper.make_tensor_value_info(
+                    model.make_new_valueinfo_name(),
+                    TensorProto.FLOAT,
+                    (1, ofm_dim, ofm_dim, ifm_ch * k * k),
+                )
+                graph.value_info.append(im2col_out)
+                im2col_out = im2col_out.name
+                model.set_tensor_datatype(im2col_out, idt)
+
+                pool_output = helper.make_tensor_value_info(
+                    model.make_new_valueinfo_name(),
+                    TensorProto.FLOAT,
+                    (1, ofm_dim, ofm_dim, ofm_ch),
+                )
+                graph.value_info.append(pool_output)
+                pool_output = pool_output.name
+                # model.set_tensor_datatype(pool_output, odt)
+
+                # create new nodes
+                # NCHW -> NHWC
+                inp_trans_node = helper.make_node(
+                    "Transpose", [node_input], [inp_trans_out], perm=[0, 2, 3, 1]
+                )
+
+                accum_bits = 0
+                pool_size_param = k
+                pad_value = 0
+                if n.op_type == "MaxPool":
+                    pool_fxn = "MaxPool"
+                    odt = idt
+                    pad_value = idt.min()
+                elif n.op_type == "QuantAvgPool2d":
+                    assert odt.is_integer(), """Output data type for QuantAvgPool2d
+                    needs to be integer"""
+                    assert pad == 0, "Padding is not supported for QuantAvgPool2d"
+                    inst = getCustomOp(n)
+                    pool_fxn = "QuantAvgPool"
+                    pool_size_param = inst.get_shifts()
+                    accum_bits = inst.get_accum_size()
+
+                else:
+                    raise Exception(
+                        "pad_value and pool_fxn not configured for {}".format(n.op_type)
+                    )
+
+                # format input tensor
+                im2col_node = helper.make_node(
+                    "Im2Col",
+                    [inp_trans_out],
+                    [im2col_out],
+                    domain="finn",
+                    stride=stride,
+                    kernel_size=k,
+                    pad_amount=pad,
+                    pad_value=pad_value,
+                    depthwise=1,
+                    input_shape="(1,{},{},{})".format(ifm_dim, ifm_dim, ifm_ch),
+                )
+
+                # Warning: PE has to be equal to ifm_ch until Im2Col is replaced by
+                # ConvolutionInputGenerator with depthwise=1.
+                # For other settings the output will be incorrect due to an
+                # incorrect input data layout.
+                pool_node = helper.make_node(
+                    "Pool_Batch",
+                    [im2col_out],
+                    [pool_output],
+                    domain="finn",
+                    backend="fpgadataflow",
+                    InputDataType=idt.name,
+                    OutputDataType=odt.name,
+                    Channels=ifm_ch,
+                    PE=ifm_ch,
+                    KernelSize=k,
+                    Function=pool_fxn,
+                    OutImgDim=ofm_dim,
+                    AccumBits=accum_bits,
+                    Size=pool_size_param,
+                    BatchSize=1,
+                )
+
+                # NHWC -> NCHW
+                out_trans_node = helper.make_node(
+                    "Transpose", [pool_output], [node_output], perm=[0, 3, 1, 2]
+                )
+
+                # insert nodes where the pool was to preserve topological ordering
+                graph.node.insert(node_ind, inp_trans_node)
+                graph.node.insert(node_ind + 1, im2col_node)
+                graph.node.insert(node_ind + 2, pool_node)
+                graph.node.insert(node_ind + 3, out_trans_node)
+                # remove old node
+                graph.node.remove(n)
+                graph_modified = True
+
+        if graph_modified:
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
+
+
 class InferBinaryStreamingFCLayer(Transformation):
     """Convert XnorPopcountMatMul layers to
     StreamingFCLayer_Batch layers. Any immediately following MultiThreshold
@@ -261,6 +492,7 @@ class InferBinaryStreamingFCLayer(Transformation):
                     graph.node.remove(n)
                     graph_modified = True
         if graph_modified:
+            model = model.transform(MinimizeAccumulatorWidth())
             model = model.transform(InferShapes())
             model = model.transform(InferDataTypes())
         return (model, graph_modified)
@@ -281,7 +513,7 @@ class InferQuantizedStreamingFCLayer(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "MatMul":
+            if n.op_type == "MatMul" and model.get_tensor_sparsity(n.input[1]) is None:
                 mm_input = n.input[0]
                 mm_weight = n.input[1]
                 mm_output = n.output[0]
@@ -394,6 +626,689 @@ class InferQuantizedStreamingFCLayer(Transformation):
                         # remove old node
                         graph.node.remove(n)
                         graph_modified = True
+        if graph_modified:
+            model = model.transform(MinimizeAccumulatorWidth())
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
+
+
+class InferVVAU(Transformation):
+    """Convert MatMul layers with quantized inputs and weights to
+    Vector_Vector_Activate_Batch layers, if the sparsity annotation
+    of the weight matrix indicates that the MatMul layer belongs to
+    a depthwise convolution. Any immediately following MultiThreshold
+    layers will also be absorbed into the VVAU."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            node_ind += 1
+            if (
+                n.op_type == "MatMul"
+                and model.get_tensor_sparsity(n.input[1]) is not None
+            ):
+                sparsity = model.get_tensor_sparsity(n.input[1])
+                try:
+                    k = sparsity["dw"]["kernel_shape"]
+                except KeyError:
+                    raise Exception(
+                        """Sparsity doesn't indicate that MatMul
+                        belongs to a depthwise convolution."""
+                    )
+
+                mm_input = n.input[0]
+                mm_weight = n.input[1]
+                mm_output = n.output[0]
+                mm_in_shape = model.get_tensor_shape(mm_input)
+                mm_out_shape = model.get_tensor_shape(mm_output)
+                idt = model.get_tensor_datatype(mm_input)
+                wdt = model.get_tensor_datatype(mm_weight)
+                if idt.is_integer() and wdt.is_integer():
+                    mm_output = n.output[0]
+                    W = model.get_initializer(mm_weight)
+                    # infer the dense weight tensor from the sparse weight matrix,
+                    # using the kernel size k extracted above and the number of
+                    # channels.
+                    # the sparse weight matrix has shape (k * k * Channels, Channels);
+                    # we reverse its construction to obtain a dense weight tensor
+                    # of shape (Channels, 1, k, k)
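+                    # e.g. for a 3x3 depthwise conv with 64 channels, the sparse
+                    # (576, 64) weight matrix becomes a dense (64, 1, 3, 3) tensor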
+                    channels = int(W.shape[1])
+                    # transpose to achieve a shape of (Channels, k * k * Channels)
+                    W = W.T
+                    # reshape to (Channels, k, k, Channels) to transpose afterwards
+                    # to (Channels, Channels, k, k)
+                    W = W.reshape(channels, k, k, channels)
+                    W = W.transpose(0, 3, 1, 2)
+                    # now we can extract the values using a for loop over the channels
+                    # and fill a zero numpy array in the correct shape
+                    w_tensor = np.zeros((channels, 1, k, k))
+                    for ch in range(channels):
+                        w_tensor[ch][0] = W[ch][ch]
+                    model.set_initializer(mm_weight, w_tensor)
+                    model.set_tensor_shape(mm_weight, (channels, 1, k, k))
+                    # create node with pe=channels as default
+                    pe = channels
+                    assert (
+                        channels % pe == 0
+                    ), "Requirement Channels divisable by PE is violated."
+                    # see if we have any following thresholds
+                    consumer = model.find_consumer(mm_output)
+                    if consumer is not None and consumer.op_type == "MultiThreshold":
+                        # create VVAU (i.e. including activation)
+                        mt_output = consumer.output[0]
+                        mt_out_shape = model.get_tensor_shape(mt_output)
+                        mt_thres = consumer.input[1]
+                        T = model.get_initializer(mt_thres)
+                        assert (
+                            T.shape[0] == 1 or T.shape[0] == channels
+                        ), """First dimension of
+                        thresholds neither 1 nor Channels."""
+                        odt = model.get_tensor_datatype(mt_output)
+                        scale = getCustomOp(consumer).get_nodeattr("out_scale")
+                        assert (
+                            scale == 1.0
+                        ), "out_scale must be equal to 1.0 for HLS conversion."
+                        actval = getCustomOp(consumer).get_nodeattr("out_bias")
+                        assert (
+                            int(actval) == actval
+                        ), "out_bias must be integer for HLS conversion."
+                        actval = int(actval)
+                        assert (not odt.signed()) or (
+                            actval < 0
+                        ), "Signed output requres actval < 0"
+                        model.set_tensor_shape(mm_input, mm_in_shape)
+                        model.set_tensor_shape(mt_output, mt_out_shape)
+                        # create and insert new Vector_Vector_Activate_Batch node
+                        new_node = helper.make_node(
+                            "Vector_Vector_Activate_Batch",
+                            [mm_input, mm_weight, mt_thres],
+                            [mt_output],
+                            domain="finn",
+                            backend="fpgadataflow",
+                            resType="ap_resource_lut()",
+                            PE=pe,
+                            Dim=mm_in_shape[1],
+                            Channels=channels,
+                            Kernel=k,
+                            inputDataType=idt.name,
+                            weightDataType=wdt.name,
+                            outputDataType=odt.name,
+                            ActVal=actval,
+                            noActivation=0,
+                        )
+                        graph.node.insert(node_ind, new_node)
+                        # remove old nodes
+                        graph.node.remove(n)
+                        graph.node.remove(consumer)
+                        graph_modified = True
+                    else:
+                        # no activation, matmul only
+                        odt = model.get_tensor_datatype(mm_output)
+                        model.set_tensor_shape(mm_input, mm_in_shape)
+                        model.set_tensor_shape(mm_output, mm_out_shape)
+                        # create and insert new VVAU node
+                        new_node = helper.make_node(
+                            "Vector_Vector_Activate_Batch",
+                            [mm_input, mm_weight],
+                            [mm_output],
+                            domain="finn",
+                            backend="fpgadataflow",
+                            resType="ap_resource_lut()",
+                            PE=pe,
+                            Dim=mm_in_shape[1],
+                            Channels=channels,
+                            Kernel=k,
+                            inputDataType=idt.name,
+                            weightDataType=wdt.name,
+                            outputDataType=odt.name,
+                            ActVal=0,
+                            noActivation=1,
+                        )
+                        graph.node.insert(node_ind, new_node)
+                        # remove old node
+                        graph.node.remove(n)
+                        graph_modified = True
+        if graph_modified:
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
+
+
+class InferThresholdingLayer(Transformation):
+    """Convert any MultiThreshold into a standalone thresholding HLS layer."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for node in graph.node:
+            node_ind += 1
+            if node.op_type == "MultiThreshold":
+                thl_input = node.input[0]
+                thl_threshold = node.input[1]
+                thl_output = node.output[0]
+                thl_in_shape = model.get_tensor_shape(thl_input)
+                idt = model.get_tensor_datatype(thl_input)
+
+                # skip conversion for layers with float input
+                if not idt.is_integer():
+                    continue
+
+                # check layout of input/output and convert to NHWC if needed
+                thl_in_layout = model.get_tensor_layout(thl_input)
+                if thl_in_layout == DataLayout.NCHW:
+                    thl_input = nchw_to_nhwc(thl_input, model, node_ind)
+                    node_ind += 1
+                    thl_in_shape = model.get_tensor_shape(thl_input)
+
+                # keep track of where we need to insert the HLS Op
+                # it has to be ahead of the output transform
+                insert_point = node_ind
+                thl_output_layout = model.get_tensor_layout(thl_output)
+                if thl_output_layout == DataLayout.NCHW:
+                    thl_output = nchw_to_nhwc(thl_output, model, node_ind, reverse=True)
+                    node_ind += 1
+
+                # now safe to assume number of channels is in last dimension
+                ifc = int(thl_in_shape[-1])
+                # create node with no parallelization first
+                pe = 1
+                assert ifc % pe == 0, "Requirement IFC divisible by PE is violated."
+
+                odt = model.get_tensor_datatype(thl_output)
+                scale = getCustomOp(node).get_nodeattr("out_scale")
+                assert (
+                    scale == 1.0
+                ), "MultiThreshold out_scale must be equal to 1.0 for HLS conversion."
+                actval = getCustomOp(node).get_nodeattr("out_bias")
+                assert (
+                    int(actval) == actval
+                ), "MultiThreshold out_bias must be integer for HLS conversion."
+                actval = int(actval)
+                assert (not odt.signed()) or (
+                    actval < 0
+                ), "Signed output requres actval < 0"
+                # create and insert new Thresholding_Batch node
+                new_node = helper.make_node(
+                    "Thresholding_Batch",
+                    [thl_input, thl_threshold],
+                    [thl_output],
+                    domain="finn",
+                    backend="fpgadataflow",
+                    NumChannels=ifc,
+                    PE=pe,
+                    inputDataType=idt.name,
+                    outputDataType=odt.name,
+                    numInputVectors=list(thl_in_shape[:-1]),
+                    ActVal=actval,
+                )
+                graph.node.insert(insert_point, new_node)
+                # remove old node
+                graph.node.remove(node)
+                graph_modified = True
+
+        if graph_modified:
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
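+
+# Typical usage (illustrative sketch, not exercised here): the conversion assumes
+# shapes, datatypes and layouts have already been annotated, e.g. via the
+# InferShapes/InferDataTypes/InferDataLayouts transformations:
+#   model = model.transform(InferThresholdingLayer())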
+
+
+class InferAddStreamsLayer(Transformation):
+    """Convert any Add into a AddStreams HLS layer."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for node in graph.node:
+            node_ind += 1
+            if node.op_type == "Add":
+                in0 = node.input[0]
+                in1 = node.input[1]
+                result = node.output[0]
+                in0_shape = model.get_tensor_shape(in0)
+                in1_shape = model.get_tensor_shape(in1)
+
+                # skip if different shapes on inputs
+                if in0_shape != in1_shape:
+                    continue
+
+                idt0 = model.get_tensor_datatype(in0)
+                idt1 = model.get_tensor_datatype(in1)
+
+                # skip if different data types on inputs
+                if idt0 != idt1:
+                    continue
+
+                idt = idt0
+
+                # skip conversion for layers with float input
+                if not idt.is_integer():
+                    continue
+
+                # check layout and convert if necessary
+                in0_layout = model.get_tensor_layout(in0)
+                in1_layout = model.get_tensor_layout(in1)
+                result_layout = model.get_tensor_layout(result)
+
+                if in0_layout == DataLayout.NCHW:
+                    in0 = nchw_to_nhwc(in0, model, node_ind)
+                    node_ind += 1
+                    in0_shape = model.get_tensor_shape(in0)
+
+                if in1_layout == DataLayout.NCHW:
+                    in1 = nchw_to_nhwc(in1, model, node_ind)
+                    node_ind += 1
+                    in1_shape = model.get_tensor_shape(in1)
+
+                # keep track of where we need to insert the HLS Op
+                # it has to be ahead of the output transform
+                insert_point = node_ind
+
+                if result_layout == DataLayout.NCHW:
+                    result = nchw_to_nhwc(result, model, node_ind, reverse=True)
+                    node_ind += 1
+
+                # now safe to assume num_channels is size of last dimension
+                num_channels = int(in0_shape[-1])
+                # create node with no parallelization first
+                pe = 1
+                assert (
+                    num_channels % pe == 0
+                ), "Requirement Channels divisable by PE is violated."
+
+                # create and insert new AddStreams_Batch node
+                new_node = helper.make_node(
+                    "AddStreams_Batch",
+                    [in0, in1],
+                    [result],
+                    domain="finn",
+                    backend="fpgadataflow",
+                    NumChannels=num_channels,
+                    PE=pe,
+                    inputDataType=idt.name,
+                    numInputVectors=in0_shape[:-1],
+                )
+                graph.node.insert(insert_point, new_node)
+                # remove old node
+                graph.node.remove(node)
+                graph_modified = True
+
+        if graph_modified:
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
+
+
+class InferDuplicateStreamsLayer(Transformation):
+    """Insert a DuplicateStreams HLS layer for any tensor with fanout == 2 """
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for node in graph.node:
+            node_ind += 1
+            successors = model.find_consumers(node.output[0])
+            if successors is not None and len(successors) == 2:
+                output_tensor = node.output[0]
+
+                dt = model.get_tensor_datatype(output_tensor)
+
+                # skip insertion for tensors with non-integer (float) values
+                if not dt.is_integer():
+                    continue
+
+                # create clone tensors
+                out_shape = model.get_tensor_shape(output_tensor)
+                out_tensor_clones = []
+                for i in range(2):
+                    clone = helper.make_tensor_value_info(
+                        model.make_new_valueinfo_name(), TensorProto.FLOAT, out_shape
+                    )
+                    model.graph.value_info.append(clone)
+                    out_tensor_clones += [clone.name]
+
+                num_ch = int(out_shape[-1])
+                vecs = out_shape[:-1]
+
+                # create node with no parallelization first
+                pe = 1
+                assert (
+                    num_ch % pe == 0
+                ), "Requirement channels divisable by PE is violated."
+
+                dup_node = helper.make_node(
+                    "DuplicateStreams_Batch",
+                    [output_tensor],
+                    out_tensor_clones,
+                    domain="finn",
+                    backend="fpgadataflow",
+                    NumChannels=num_ch,
+                    PE=pe,
+                    inputDataType=dt.name,
+                    numInputVectors=vecs,
+                )
+
+                graph.node.insert(node_ind, dup_node)
+
+                # connect successors to out tensor clone
+                clone_idx = 0
+                for successor in successors:
+                    for i, succ_input in enumerate(successor.input):
+                        if succ_input == output_tensor:
+                            successor.input[i] = out_tensor_clones[clone_idx]
+                            clone_idx += 1
+                            # if one node has multiple connections to the same output,
+                            # find_direct_successors will return one node per input,
+                            # so breaking the inner loop here gives the correct behaviour
+                            break
+
+                graph_modified = True
+
+        if graph_modified:
+            model = model.transform(SortGraph())
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
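+
+# Illustrative effect: if tensor t is consumed by two nodes A and B, the transform
+# inserts a DuplicateStreams_Batch node that reads t and produces two clone
+# tensors, which are rewired so that A reads the first clone and B the second.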
+
+
+class InferChannelwiseLinearLayer(Transformation):
+    """Convert any channel-wise Add/Mul into a HLS layer."""
+
+    def get_smallest_possible(self, vals):
+        """Returns smallest (fewest bits) possible DataType that can represent
+        value. Prefers unsigned integers where possible."""
+        vals = np.array(vals)
+        for v in vals:
+            assert int(v) == v, "Error: non-integer value in get_smallest_possible"
+
+        for k in DataType.__members__:
+            dt = DataType[k]
+
+            if dt in [DataType.BIPOLAR, DataType.TERNARY, DataType.FLOAT32]:
+                # not currently supported
+                continue
+
+            if (dt.min() <= vals).all() and (vals <= dt.max()).all():
+                return dt
+
+        warnings.warn(
+            """InferChannelwiseLinearLayer: Output values may not be
+        representable with the supported data types.
+        Falling back to the widest available data type.
+        This will lead to errors if the inputs are not constrained accordingly.
+        """
+        )
+
+        if (0 <= vals).all():
+            return DataType.UINT64
+        else:
+            return DataType.INT64
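+
+    # Illustrative behaviour (a sketch; the exact member chosen depends on the
+    # order and set of DataType enum members):
+    #   get_smallest_possible([0, 3])  -> a small unsigned type such as UINT2
+    #   get_smallest_possible([-4, 3]) -> a small signed type covering the range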
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for node in graph.node:
+            node_ind += 1
+            if node.op_type == "Add" or node.op_type == "Mul":
+                # assuming input[0] is dynamic
+                ll_input = node.input[0]
+                ll_output = node.output[0]
+                ll_in_shape = model.get_tensor_shape(ll_input)
+
+                # check if input 1 has an initializer
+                ll_const = node.input[1]
+                if ll_const is not None:
+                    ll_cinit = model.get_initializer(ll_const)
+                    if ll_cinit is None:
+                        # input 1 is also dynamic
+                        continue
+                else:
+                    continue
+
+                # get number of channels and channel index from input
+                ll_in_layout = model.get_tensor_layout(ll_input)
+                if ll_in_layout == DataLayout.NHWC or ll_in_layout == DataLayout.NC:
+                    ch_index = -1
+                    ch = ll_in_shape[-1]
+                elif ll_in_layout == DataLayout.NCHW:
+                    ch_index = 1
+                    ch = ll_in_shape[1]
+                else:
+                    continue
+
+                # check if the shape of initializer is compatible
+                ll_cinit_shape = list(ll_cinit.shape)
+                if np.prod(ll_cinit_shape) == 1:
+                    warnings.warn(
+                        "Broadcasting " + str(node.op_type) + "(" + node.name + ")"
+                    )
+                    ll_cinit = np.full((ch), ll_cinit.flatten()[0])
+                elif np.prod(ll_cinit_shape) != ch or ll_cinit_shape[ch_index] != ch:
+                    # parameter shape not compatible with Channelwise_batch
+                    continue
+
+                # check that the initializer contains only integer values (as floats)
+                if not (ll_cinit.astype(np.int32) == ll_cinit).all():
+                    continue
+                # all initializer conditions are met
+
+                # check inputs
+                idt = model.get_tensor_datatype(ll_input)
+                if not idt.is_integer():
+                    # skip conversion for layers with float input
+                    continue
+
+                # check layout of the input and convert to NHWC if necessary
+                if ll_in_layout == DataLayout.NCHW:
+                    ll_input = nchw_to_nhwc(ll_input, model, node_ind)
+                    node_ind += 1
+                    ll_in_shape = model.get_tensor_shape(ll_input)
+
+                # keep track of where we need to insert the HLS Op
+                # it has to be ahead of the output transform
+                insert_point = node_ind
+                ll_output_layout = model.get_tensor_layout(ll_output)
+                if ll_output_layout == DataLayout.NCHW:
+                    ll_output = nchw_to_nhwc(ll_output, model, node_ind, reverse=True)
+                    node_ind += 1
+
+                # get parameter data type
+                param_min = min(ll_cinit.flatten())
+                param_max = max(ll_cinit.flatten())
+                pdt = self.get_smallest_possible([param_min, param_max])
+
+                # set function and determine output data type
+                if node.op_type == "Add":
+                    func = "add"
+                    out_min = idt.min() + param_min
+                    out_max = idt.max() + param_max
+                    odt = self.get_smallest_possible([out_min, out_max])
+                elif node.op_type == "Mul":
+                    func = "mul"
+                    possible_limits = []
+                    possible_limits += [idt.min() * param_min]
+                    possible_limits += [idt.min() * param_max]
+                    possible_limits += [idt.max() * param_min]
+                    possible_limits += [idt.max() * param_max]
+                    odt = self.get_smallest_possible(possible_limits)
+
+                model.set_initializer(ll_const, ll_cinit.reshape(ch))
+                model.set_tensor_datatype(ll_output, odt)
+
+                # create node with no parallelization first
+                pe = 1
+                assert ch % pe == 0, "Requirement Channels divisible by PE is violated."
+                # create and insert node
+                new_node = helper.make_node(
+                    "ChannelwiseOp_Batch",
+                    [ll_input, ll_const],
+                    [ll_output],
+                    domain="finn",
+                    backend="fpgadataflow",
+                    Func=func,
+                    NumChannels=ch,
+                    PE=pe,
+                    inputDataType=idt.name,
+                    paramDataType=pdt.name,
+                    outputDataType=odt.name,
+                    numInputVectors=list(ll_in_shape[:-1]),
+                )
+                graph.node.insert(insert_point, new_node)
+                # remove old node
+                graph.node.remove(node)
+                graph_modified = True
+
+        if graph_modified:
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
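+
+# Worked example for the output datatype derivation above (illustrative values):
+# an Add with INT8 input (range [-128, 127]) and per-channel constants in [0, 10]
+# has an output range of [-128 + 0, 127 + 10] = [-128, 137], so the narrowest
+# signed DataType whose range contains [-128, 137] is selected as outputDataType.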
+
+
+class InferLabelSelectLayer(Transformation):
+    """Convert any TopK into a LabelSelect HLS layer."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for node in graph.node:
+            node_ind += 1
+            if node.op_type == "TopK":
+                fc_input = node.input[0]
+                k_input = node.input[1]
+                val_output = node.output[0]
+                idx_output = node.output[1]
+                fc_in_shape = model.get_tensor_shape(fc_input)
+
+                idt = model.get_tensor_datatype(fc_input)
+
+                # skip conversion for layers with float input
+                if not idt.is_integer():
+                    continue
+
+                # skip conversion if the value output is connected (not supported)
+                if model.find_consumer(val_output) is not None:
+                    continue
+
+                num_labels = int(fc_in_shape[-1])
+                # create node with no parallelization first
+                pe = 1
+                assert (
+                    num_labels % pe == 0
+                ), "Requirement Labels divisable by PE is violated."
+
+                k = model.get_initializer(k_input)[0]
+
+                # create and insert new LabelSelect_Batch node
+                new_node = helper.make_node(
+                    "LabelSelect_Batch",
+                    [fc_input],
+                    [idx_output],
+                    domain="finn",
+                    backend="fpgadataflow",
+                    Labels=num_labels,
+                    PE=pe,
+                    K=k,
+                    inputDataType=idt.name,
+                )
+                graph.node.insert(node_ind, new_node)
+                # remove old node
+                graph.node.remove(node)
+                graph_modified = True
+
+        if graph_modified:
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
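+
+# Illustrative effect: a TopK node with k=1 over a 10-element input becomes a
+# LabelSelect_Batch node with Labels=10, K=1 and PE=1, keeping only the index
+# output (the value output must be unconnected for the conversion to apply).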
+
+
+class InferGlobalAccPoolLayer(Transformation):
+    """Convert any GlobalAveragePool into a GlobalAccPool HLS layer and a scalar Mul."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for node in graph.node:
+            node_ind += 1
+            if node.op_type == "GlobalAveragePool":
+                in0 = node.input[0]
+                result = node.output[0]
+                in0_shape = model.get_tensor_shape(in0)
+
+                idt = model.get_tensor_datatype(in0)
+
+                # skip conversion for layers with float input
+                if not idt.is_integer():
+                    continue
+
+                # check layout and convert if necessary
+                in0_layout = model.get_tensor_layout(in0)
+                result_layout = model.get_tensor_layout(result)
+
+                if in0_layout == DataLayout.NCHW:
+                    in0 = nchw_to_nhwc(in0, model, node_ind)
+                    node_ind += 1
+                    in0_shape = model.get_tensor_shape(in0)
+
+                # keep track of where we need to insert the HLS Op
+                # it has to be ahead of the output transform
+                insert_point = node_ind
+
+                if result_layout == DataLayout.NCHW:
+                    result = nchw_to_nhwc(result, model, node_ind, reverse=True)
+                    node_ind += 1
+
+                num_ch = int(in0_shape[-1])
+                vecs = in0_shape[:-1]
+                # create node with no parallelization first
+                pe = 1
+                assert (
+                    num_ch % pe == 0
+                ), "Requirement Labels divisable by PE is violated."
+
+                # create an additional tensor of the same shape and layout as result
+                out_shape = model.get_tensor_shape(result)
+                pool_out = helper.make_tensor_value_info(
+                    model.make_new_valueinfo_name(), TensorProto.FLOAT, out_shape
+                )
+                model.graph.value_info.append(pool_out)
+                pool_out = pool_out.name
+                model.set_tensor_layout(pool_out, model.get_tensor_layout(result))
+
+                new_pool = helper.make_node(
+                    "GlobalAccPool_Batch",
+                    [in0],
+                    [pool_out],
+                    domain="finn",
+                    backend="fpgadataflow",
+                    NumChannels=num_ch,
+                    PE=pe,
+                    inputDataType=idt.name,
+                    numInputVectors=vecs,
+                )
+
+                mul_value = helper.make_tensor_value_info(
+                    model.make_new_valueinfo_name(), TensorProto.FLOAT, [1]
+                )
+                model.graph.value_info.append(mul_value)
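+                # the scalar Mul rescales the accumulated sum into an average,
+                # e.g. for an NHWC input of shape (1, 4, 4, C) we get
+                # vecs = [1, 4, 4] and a Mul constant of 1 / (4 * 4) = 1/16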
+                model.set_initializer(mul_value.name, np.array(1 / (vecs[1] * vecs[2])))
+                new_mul = helper.make_node("Mul", [pool_out, mul_value.name], [result])
+                graph.node.insert(insert_point, new_pool)
+                graph.node.insert(insert_point + 1, new_mul)
+                node_ind += 1
+                # remove old node
+                graph.node.remove(node)
+                graph_modified = True
+
         if graph_modified:
             model = model.transform(InferShapes())
             model = model.transform(InferDataTypes())
diff --git a/src/finn/transformation/fpgadataflow/create_dataflow_partition.py b/src/finn/transformation/fpgadataflow/create_dataflow_partition.py
index e0f990600d9ca4be748b662b47ce8296d3d462ce..fb8b4358abd772d13c355f797649dc3b51975b4d 100644
--- a/src/finn/transformation/fpgadataflow/create_dataflow_partition.py
+++ b/src/finn/transformation/fpgadataflow/create_dataflow_partition.py
@@ -45,58 +45,91 @@ class CreateDataflowPartition(Transformation):
         super().__init__()
 
     def apply(self, model):
-        # TODO we currently assume that all dataflow nodes are connected to
-        # each other, forming a single partition. check the assumption and/or
-        # improve this.
-        all_nodes = list(model.graph.node)
-        df_nodes = filter(
-            lambda x: get_by_name(x.attribute, "backend") is not None, all_nodes
-        )
-        df_nodes = filter(
-            lambda x: get_by_name(x.attribute, "backend").s.decode("UTF-8")
-            == "fpgadataflow",
-            df_nodes,
-        )
-        df_nodes = list(df_nodes)
-        non_df_nodes = filter(lambda x: x not in df_nodes, all_nodes)
-        non_df_nodes = list(non_df_nodes)
-
-        if len(df_nodes) == 0:
-            # no changes if no dataflow nodes are present
-            return (model, False)
-        else:
-            # partition the model into two models
-            df_model = copy.deepcopy(model)
-            non_df_model = model
-            # remove all non-dataflow nodes from the dataflow model
-            for node_to_remove in non_df_nodes:
-                df_model.graph.node.remove(node_to_remove)
-            # identify the entry and exit points for the dataflow part
-            df_in = df_model.graph.node[0].input[0]
-            df_out = df_model.graph.node[-1].output[0]
-            df_in_vi = df_model.get_tensor_valueinfo(df_in)
-            df_out_vi = df_model.get_tensor_valueinfo(df_out)
-            # set df graph in/out to be df_in/df_out
-            df_model.graph.input.remove(df_model.graph.input[0])
-            df_model.graph.input.insert(0, df_in_vi)
-            df_model.graph.output.remove(df_model.graph.output[0])
-            df_model.graph.output.insert(0, df_out_vi)
-            df_model_dir = make_build_dir("dataflow_partition_")
-            df_model_filename = df_model_dir + "/df_model.onnx"
-            df_model.save(df_model_filename)
-            # remove all dataflow nodes from the non-dataflow model
-            # keep track of where the dataflow part starts
-            df_start_ind = all_nodes.index(df_nodes[0])
-            for node_to_remove in df_nodes:
-                non_df_model.graph.node.remove(node_to_remove)
-            # create StreamingDataflow node with df_in/df_out io
-            df_node = helper.make_node(
-                "StreamingDataflowPartition",
-                [df_in],
-                [df_out],
-                # use the model attribute to mark the df model
-                model=df_model_filename,
+        target_partition_id = 0
+        # we currently assume that all dataflow nodes belonging to the same partition
+        # are connected to each other and there is a single input/output to/from each.
+        # NOTE: all dataflow nodes with no partition_id set are moved to partition 0
+        # TODO: check the assumption and/or improve this.
+        while True:
+            all_nodes = list(model.graph.node)
+            df_nodes = filter(
+                lambda x: get_by_name(x.attribute, "backend") is not None, all_nodes
+            )
+            df_nodes = filter(
+                lambda x: get_by_name(x.attribute, "backend").s.decode("UTF-8")
+                == "fpgadataflow"
+                and (
+                    get_by_name(x.attribute, "partition_id") is None
+                    or get_by_name(x.attribute, "partition_id").i == target_partition_id
+                )
+                and x.op_type != "StreamingDataflowPartition",
+                df_nodes,
             )
-            non_df_model.graph.node.insert(df_start_ind, df_node)
+            df_nodes = list(df_nodes)
+            non_df_nodes = filter(lambda x: x not in df_nodes, all_nodes)
+            non_df_nodes = list(non_df_nodes)
+
+            if len(df_nodes) == 0:
+                # no changes if no dataflow nodes are present
+                break
+            else:
+                # partition the model into two models
+                df_model = copy.deepcopy(model)
+                non_df_model = model
+                # remove all non-dataflow nodes from the dataflow model
+                for node_to_remove in non_df_nodes:
+                    df_model.graph.node.remove(node_to_remove)
+                # identify the entry and exit points for the dataflow part
+                df_in = df_model.graph.node[0].input[0]
+                df_out = df_model.graph.node[-1].output[0]
+                df_in_vi = df_model.get_tensor_valueinfo(df_in)
+                df_out_vi = df_model.get_tensor_valueinfo(df_out)
+                # set df graph in/out to be df_in/df_out
+                df_model.graph.input.remove(df_model.graph.input[0])
+                df_model.graph.input.insert(0, df_in_vi)
+                df_model.graph.output.remove(df_model.graph.output[0])
+                df_model.graph.output.insert(0, df_out_vi)
+                # parse StreamingFCLayers looking for external weight memories
+                fc_extw_nodes = filter(
+                    lambda x: x.op_type == "StreamingFCLayer_Batch"
+                    and get_by_name(x.attribute, "mem_mode") is not None
+                    and get_by_name(x.attribute, "mem_mode").s.decode("UTF-8")
+                    == "external",
+                    df_nodes,
+                )
+                fc_extw_nodes = list(fc_extw_nodes)
+                extra_df_inputs = []
+
+                for i in range(len(fc_extw_nodes)):
+                    fc_weight_vi = df_model.get_tensor_valueinfo(
+                        fc_extw_nodes[i].input[1]
+                    )
+                    df_model.graph.input.insert(i + 1, fc_weight_vi)
+                    extra_df_inputs.append(fc_extw_nodes[i].input[1])
+
+                # save model
+                df_model_dir = make_build_dir(
+                    "dataflow_partition" + str(target_partition_id) + "_"
+                )
+                df_model_filename = df_model_dir + "/df_model.onnx"
+                df_model.cleanup()
+                df_model.save(df_model_filename)
+                # remove all dataflow nodes from the non-dataflow model
+                # keep track of where the dataflow part starts
+                df_start_ind = all_nodes.index(df_nodes[0])
+                for node_to_remove in df_nodes:
+                    non_df_model.graph.node.remove(node_to_remove)
+                # create StreamingDataflow node with df_in/df_out io
+                df_node = helper.make_node(
+                    "StreamingDataflowPartition",
+                    [df_in] + extra_df_inputs,
+                    [df_out],
+                    # use the model attribute to mark the df model
+                    model=df_model_filename,
+                    domain="finn",
+                )
+                non_df_model.graph.node.insert(df_start_ind, df_node)
+                model = non_df_model
+                target_partition_id += 1
 
-        return (non_df_model, False)
+        return (model, False)
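+
+# Usage sketch (illustrative): partition membership is controlled via the
+# "partition_id" node attribute (e.g. as assigned by the Floorplan transformation);
+# dataflow nodes without a partition_id end up in partition 0:
+#   model = model.transform(CreateDataflowPartition())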
diff --git a/src/finn/transformation/fpgadataflow/create_stitched_ip.py b/src/finn/transformation/fpgadataflow/create_stitched_ip.py
index c22a21ebdfd19178d3937de3a235dfadb7ee1d71..0def25d8429f5d3f6c02a9db656650bc1baba6ee 100644
--- a/src/finn/transformation/fpgadataflow/create_stitched_ip.py
+++ b/src/finn/transformation/fpgadataflow/create_stitched_ip.py
@@ -27,11 +27,17 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import os
+import warnings
 import subprocess
 
 from finn.transformation import Transformation
 from finn.util.basic import get_by_name, make_build_dir
 from finn.custom_op.registry import getCustomOp
+from finn.util.basic import get_num_default_workers
+import multiprocessing as mp
+from finn.transformation.fpgadataflow.replace_verilog_relpaths import (
+    ReplaceVerilogRelPaths,
+)
 
 
 class CreateStitchedIP(Transformation):
@@ -48,14 +54,141 @@ class CreateStitchedIP(Transformation):
     The packaged block design IP can be found under the ip subdirectory.
     """
 
-    def __init__(self, fpgapart):
+    def __init__(self, fpgapart, clk_ns, ip_name="finn_design", vitis=False):
         super().__init__()
         self.fpgapart = fpgapart
+        self.clk_ns = clk_ns
+        self.ip_name = ip_name
+        self.vitis = vitis
+        if float(clk_ns) not in [5.0, 10.0, 20.0]:
+            warnings.warn(
+                """The chosen frequency may lead to failure due to clock divider
+                constraints."""
+            )
+        self.has_axilite = False
+        self.has_aximm = False
+        self.has_m_axis = False
+        self.m_axis_idx = 0
+        self.has_s_axis = False
+        self.s_axis_idx = 0
+        self.clock_reset_are_external = False
+        self.create_cmds = []
+        self.connect_cmds = []
+        # keep track of top-level interface names
+        self.intf_names = {
+            "clk": [],
+            "rst": [],
+            "s_axis": [],
+            "m_axis": [],
+            "aximm": [],
+            "axilite": [],
+        }
+
+    def connect_clk_rst(self, node):
+        inst_name = node.name
+        node_inst = getCustomOp(node)
+        clock_intf_name = node_inst.get_verilog_top_module_intf_names()["clk"][0]
+        reset_intf_name = node_inst.get_verilog_top_module_intf_names()["rst"][0]
+        # make clock and reset external, if they aren't already
+        if not self.clock_reset_are_external:
+            self.connect_cmds.append(
+                "make_bd_pins_external [get_bd_pins %s/%s]"
+                % (inst_name, clock_intf_name)
+            )
+            self.connect_cmds.append("set_property name ap_clk [get_bd_ports ap_clk_0]")
+            self.connect_cmds.append(
+                "make_bd_pins_external [get_bd_pins %s/%s]"
+                % (inst_name, reset_intf_name)
+            )
+            self.connect_cmds.append(
+                "set_property name ap_rst_n [get_bd_ports ap_rst_n_0]"
+            )
+            self.clock_reset_are_external = True
+            self.intf_names["clk"] = ["ap_clk"]
+            self.intf_names["rst"] = ["ap_rst_n"]
+        # otherwise connect clock and reset
+        else:
+            self.connect_cmds.append(
+                "connect_bd_net [get_bd_ports ap_rst_n] [get_bd_pins %s/%s]"
+                % (inst_name, reset_intf_name)
+            )
+            self.connect_cmds.append(
+                "connect_bd_net [get_bd_ports ap_clk] [get_bd_pins %s/%s]"
+                % (inst_name, clock_intf_name)
+            )
+
+    def connect_axi(self, node):
+        inst_name = node.name
+        node_inst = getCustomOp(node)
+        axilite_intf_name = node_inst.get_verilog_top_module_intf_names()["axilite"]
+        aximm_intf_name = node_inst.get_verilog_top_module_intf_names()["aximm"]
+        if len(axilite_intf_name) != 0:
+            self.connect_cmds.append(
+                "make_bd_intf_pins_external "
+                "[get_bd_intf_pins %s/%s]" % (inst_name, axilite_intf_name[0])
+            )
+            self.connect_cmds.append(
+                "set_property name s_axi_control " "[get_bd_intf_ports s_axi_control_0]"
+            )
+            assert (
+                self.has_axilite is False
+            ), "Currently limited to one slave AXI-Stream"
+            self.intf_names["axilite"] = ["s_axi_control"]
+            self.has_axilite = True
+        if len(aximm_intf_name) != 0:
+            self.connect_cmds.append(
+                "make_bd_intf_pins_external [get_bd_intf_pins %s/%s]"
+                % (inst_name, aximm_intf_name[0])
+            )
+            self.connect_cmds.append(
+                "set_property name m_axi_gmem0 [get_bd_intf_ports m_axi_gmem_0]"
+            )
+            self.intf_names["aximm"] = ["m_axi_gmem0"]
+            assert self.has_aximm is False, "Currently limited to one AXI-MM interface"
+            self.has_aximm = True
+
+    def connect_m_axis_external(self, node):
+        inst_name = node.name
+        node_inst = getCustomOp(node)
+        output_intf_names = node_inst.get_verilog_top_module_intf_names()["m_axis"]
+        # make output axis external
+        for output_intf_name in output_intf_names:
+            self.connect_cmds.append(
+                "make_bd_intf_pins_external [get_bd_intf_pins %s/%s]"
+                % (inst_name, output_intf_name)
+            )
+            self.connect_cmds.append(
+                "set_property name m_axis_%d [get_bd_intf_ports %s_0]"
+                % (self.m_axis_idx, output_intf_name)
+            )
+            self.has_m_axis = True
+            self.intf_names["m_axis"].append("m_axis_%d" % self.m_axis_idx)
+            self.m_axis_idx += 1
+
+    def connect_s_axis_external(self, node):
+        inst_name = node.name
+        node_inst = getCustomOp(node)
+        input_intf_names = node_inst.get_verilog_top_module_intf_names()["s_axis"]
+        # make input axis external
+        for input_intf_name in input_intf_names:
+            self.connect_cmds.append(
+                "make_bd_intf_pins_external [get_bd_intf_pins %s/%s]"
+                % (inst_name, input_intf_name)
+            )
+            self.connect_cmds.append(
+                "set_property name s_axis_%d [get_bd_intf_ports %s_0]"
+                % (self.s_axis_idx, input_intf_name)
+            )
+            self.has_s_axis = True
+            self.intf_names["s_axis"].append("s_axis_%d" % self.s_axis_idx)
+            self.s_axis_idx += 1
 
     def apply(self, model):
+        # ensure non-relative readmemh .dat files
+        model = model.transform(ReplaceVerilogRelPaths())
         ip_dirs = ["list"]
-        create_cmds = []
-        connect_cmds = []
+        # add RTL streamer IP
+        ip_dirs.append("/workspace/finn/finn-rtllib/memstream")
         # ensure that all nodes are fpgadataflow, and that IPs are generated
         for node in model.graph.node:
             assert node.domain == "finn", 'Node domain is not set to "finn"'
@@ -70,62 +203,58 @@ class CreateStitchedIP(Transformation):
             ip_dir_value = node_inst.get_nodeattr("ip_path")
             assert os.path.isdir(ip_dir_value), "IP generation directory doesn't exist."
             ip_dirs += [ip_dir_value]
-            vlnv = node_inst.get_nodeattr("ip_vlnv")
-            inst_name = node.name
-            create_cmd = "create_bd_cell -type ip -vlnv %s %s" % (vlnv, inst_name)
-            create_cmds += [create_cmd]
-            # TODO nonlinear topologies: check this for all inputs
+            self.create_cmds += node_inst.code_generation_ipi()
             my_producer = model.find_producer(node.input[0])
+            self.connect_clk_rst(node)
+            self.connect_axi(node)
             if my_producer is None:
                 # first node in graph
-                # make clock and reset external
-                connect_cmds.append(
-                    "make_bd_pins_external [get_bd_pins %s/ap_clk]" % inst_name
-                )
-                connect_cmds.append(
-                    "make_bd_pins_external [get_bd_pins %s/ap_rst_n]" % inst_name
-                )
-                # make input external
-                connect_cmds.append(
-                    "make_bd_intf_pins_external [get_bd_intf_pins %s/in0_V_V]"
-                    % inst_name
-                )
+                self.connect_s_axis_external(node)
+                if node.op_type == "TLastMarker":
+                    assert (
+                        node_inst.get_nodeattr("Direction") == "in"
+                    ), """Output TLastMarker incorrect direction"""
+                elif node.op_type == "IODMA" and len(model.graph.node) != 1:
+                    # don't apply this check for a 1-node partition
+                    assert (
+                        node_inst.get_nodeattr("direction") == "in"
+                    ), """Input DMA incorrect direction"""
             else:
                 # intermediate node
-                # wire up global clock and reset
-                connect_cmds.append(
-                    "connect_bd_net [get_bd_ports ap_rst_n_0] [get_bd_pins %s/ap_rst_n]"
-                    % inst_name
-                )
-                connect_cmds.append(
-                    "connect_bd_net [get_bd_ports ap_clk_0] [get_bd_pins %s/ap_clk]"
-                    % inst_name
-                )
-                # wire up input to previous output
-                # TODO nonlinear topologies: loop over all inputs
-                my_in_name = "%s/in0_V_V" % (inst_name)
-                prev_out_name = "%s/out_V_V" % (my_producer.name)
-                connect_cmds.append(
-                    "connect_bd_intf_net [get_bd_intf_pins %s] [get_bd_intf_pins %s]"
-                    % (prev_out_name, my_in_name)
-                )
-            if model.find_consumer(node.output[0]) is None:
+                # wire up input(s) to previous node output(s)
+                # foreach input
+                #     find producer
+                #     find index of producer output connected to our target input
+                #     get names of hdl interfaces for input and producer output
+                #     issue a TCL directive to connect input to output
+                #     if FC layer with mode "decoupled", add a streamer on input 1
+                for i in range(len(node.input)):
+                    producer = model.find_producer(node.input[i])
+                    if producer is None:
+                        continue
+                    j = list(producer.output).index(node.input[i])
+                    src_intf_name = getCustomOp(
+                        producer
+                    ).get_verilog_top_module_intf_names()["m_axis"][j]
+                    dst_intf_name = node_inst.get_verilog_top_module_intf_names()[
+                        "s_axis"
+                    ][i]
+                    self.connect_cmds.append(
+                        "connect_bd_intf_net [get_bd_intf_pins %s/%s] "
+                        "[get_bd_intf_pins %s/%s]"
+                        % (producer.name, src_intf_name, node.name, dst_intf_name)
+                    )
+            if model.find_consumers(node.output[0]) is None:
                 # last node in graph
-                # ensure it is a TLastMarker to have a valid TLast signal
-                assert (
-                    node.op_type == "TLastMarker"
-                ), """Last node is not TLastMarker.
-                Please run transformation InsertTLastMarker to ensure a valid
-                TLast signal"""
-                # make output external
-                connect_cmds.append(
-                    "make_bd_intf_pins_external [get_bd_intf_pins %s/out_r]" % inst_name
-                )
-                # make AXI lite IF external
-                connect_cmds.append(
-                    "make_bd_intf_pins_external [get_bd_intf_pins %s/s_axi_control]"
-                    % inst_name
-                )
+                self.connect_m_axis_external(node)
+                if node.op_type == "TLastMarker":
+                    assert (
+                        node_inst.get_nodeattr("Direction") == "out"
+                    ), """Output TLastMarker incorrect direction"""
+                elif node.op_type == "IODMA" and len(model.graph.node) != 1:
+                    assert (
+                        node_inst.get_nodeattr("direction") == "out"
+                    ), """Output DMA incorrect direction"""
 
         # create a temporary folder for the project
         prjname = "finn_vivado_stitch_proj"
@@ -143,21 +272,54 @@ class CreateStitchedIP(Transformation):
         tcl.append("set_property ip_repo_paths [%s] [current_project]" % ip_dirs_str)
         tcl.append("update_ip_catalog")
         # create block design and instantiate all layers
-        block_name = "finn_design"
+        block_name = self.ip_name
         tcl.append('create_bd_design "%s"' % block_name)
-        tcl.extend(create_cmds)
-        tcl.extend(connect_cmds)
-        # TODO get from Transformation arg or metadata_prop
-        fclk_hz = 100 * 1000000
-        tcl.append("set_property CONFIG.FREQ_HZ %f [get_bd_ports /ap_clk_0]" % fclk_hz)
+        tcl.extend(self.create_cmds)
+        tcl.extend(self.connect_cmds)
+        fclk_mhz = 1 / (self.clk_ns * 0.001)
+        fclk_hz = fclk_mhz * 1000000
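+        # e.g. clk_ns = 10.0 gives fclk_mhz = 100.0 and fclk_hz = 1.0e8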
+        model.set_metadata_prop("clk_ns", str(self.clk_ns))
+        tcl.append("set_property CONFIG.FREQ_HZ %f [get_bd_ports /ap_clk]" % fclk_hz)
         tcl.append("regenerate_bd_layout")
         tcl.append("validate_bd_design")
         tcl.append("save_bd_design")
+        # create wrapper hdl (for rtlsim later on)
+        bd_base = "%s/%s.srcs/sources_1/bd/%s" % (
+            vivado_stitch_proj_dir,
+            prjname,
+            block_name,
+        )
+        bd_filename = "%s/%s.bd" % (bd_base, block_name)
+        tcl.append("make_wrapper -files [get_files %s] -top" % bd_filename)
+        wrapper_filename = "%s/hdl/%s_wrapper.v" % (bd_base, block_name)
+        tcl.append("add_files -norecurse %s" % wrapper_filename)
+        model.set_metadata_prop("wrapper_filename", wrapper_filename)
+        # synthesize to DCP and export stub, DCP and constraints
+        if self.vitis:
+            tcl.append(
+                "set_property SYNTH_CHECKPOINT_MODE Hierarchical [ get_files %s ]"
+                % bd_filename
+            )
+            tcl.append(
+                "set_property -name {STEPS.SYNTH_DESIGN.ARGS.MORE OPTIONS} "
+                "-value {-mode out_of_context} -objects [get_runs synth_1]"
+            )
+            num_workers = get_num_default_workers()
+            assert num_workers >= 0, "Number of workers must be nonnegative."
+            if num_workers == 0:
+                num_workers = mp.cpu_count()
+            tcl.append("launch_runs synth_1 -jobs %s" % str(num_workers))
+            tcl.append("wait_on_run [get_runs synth_1]")
+            tcl.append("open_run synth_1 -name synth_1")
+            tcl.append("write_verilog -force -mode synth_stub %s.v" % block_name)
+            tcl.append("write_checkpoint %s.dcp" % block_name)
+            tcl.append("write_xdc %s.xdc" % block_name)
         # export block design itself as an IP core
         block_vendor = "xilinx_finn"
         block_library = "finn"
         block_vlnv = "%s:%s:%s:1.0" % (block_vendor, block_library, block_name)
         model.set_metadata_prop("vivado_stitch_vlnv", block_vlnv)
+        model.set_metadata_prop("vivado_stitch_ifnames", str(self.intf_names))
         tcl.append(
             (
                 "ipx::package_project -root_dir %s/ip -vendor %s "
@@ -167,21 +329,94 @@ class CreateStitchedIP(Transformation):
         )
         tcl.append("set_property core_revision 2 [ipx::find_open_core %s]" % block_vlnv)
         tcl.append("ipx::create_xgui_files [ipx::find_open_core %s]" % block_vlnv)
+        # if targeting Vitis, add some properties to the IP
+        if self.vitis:
+            tcl.append(
+                "ipx::remove_bus_parameter FREQ_HZ "
+                "[ipx::get_bus_interfaces CLK.AP_CLK -of_objects [ipx::current_core]]"
+            )
+            # replace source code with dcp
+            tcl.append(
+                "set_property sdx_kernel true [ipx::find_open_core %s]" % block_vlnv
+            )
+            tcl.append(
+                "set_property sdx_kernel_type rtl [ipx::find_open_core %s]" % block_vlnv
+            )
+            tcl.append(
+                "set_property supported_families { } [ipx::find_open_core %s]"
+                % block_vlnv
+            )
+            tcl.append(
+                "set_property xpm_libraries {XPM_CDC XPM_MEMORY XPM_FIFO} "
+                "[ipx::find_open_core %s]" % block_vlnv
+            )
+            tcl.append(
+                "set_property auto_family_support_level level_2 "
+                "[ipx::find_open_core %s]" % block_vlnv
+            )
+            # remove all files from synthesis and sim groups
+            # we'll replace with DCP, stub, and xdc
+            tcl.append(
+                "ipx::remove_all_file "
+                "[ipx::get_file_groups xilinx_anylanguagebehavioralsimulation]"
+            )
+            tcl.append(
+                "ipx::remove_all_file "
+                "[ipx::get_file_groups xilinx_anylanguagesynthesis]"
+            )
+            tcl.append(
+                "ipx::remove_file_group "
+                "xilinx_anylanguagebehavioralsimulation [ipx::current_core]"
+            )
+            tcl.append(
+                "ipx::remove_file_group "
+                "xilinx_anylanguagesynthesis [ipx::current_core]"
+            )
+            # remove sim and src folders
+            tcl.append("file delete -force %s/ip/sim" % vivado_stitch_proj_dir)
+            tcl.append("file delete -force %s/ip/src" % vivado_stitch_proj_dir)
+            # copy and add DCP, stub, and xdc
+            tcl.append("file mkdir %s/ip/dcp" % vivado_stitch_proj_dir)
+            tcl.append("file mkdir %s/ip/impl" % vivado_stitch_proj_dir)
+            tcl.append(
+                "file copy -force %s.dcp %s/ip/dcp"
+                % (block_name, vivado_stitch_proj_dir)
+            )
+            tcl.append(
+                "file copy -force %s.xdc %s/ip/impl"
+                % (block_name, vivado_stitch_proj_dir)
+            )
+            tcl.append("ipx::add_file_group xilinx_implementation [ipx::current_core]")
+            tcl.append(
+                "ipx::add_file impl/%s.xdc [ipx::get_file_groups xilinx_implementation]"
+                % block_name
+            )
+            tcl.append(
+                "set_property used_in [list implementation] "
+                "[ipx::get_files impl/%s.xdc "
+                "-of_objects [ipx::get_file_groups xilinx_implementation]]" % block_name
+            )
+            tcl.append(
+                "ipx::add_file_group " "xilinx_synthesischeckpoint [ipx::current_core]"
+            )
+            tcl.append(
+                "ipx::add_file dcp/%s.dcp "
+                "[ipx::get_file_groups xilinx_synthesischeckpoint]" % block_name
+            )
+            tcl.append(
+                "ipx::add_file_group xilinx_simulationcheckpoint [ipx::current_core]"
+            )
+            tcl.append(
+                "ipx::add_file dcp/%s.dcp "
+                "[ipx::get_file_groups xilinx_simulationcheckpoint]" % block_name
+            )
         tcl.append("ipx::update_checksums [ipx::find_open_core %s]" % block_vlnv)
         tcl.append("ipx::save_core [ipx::find_open_core %s]" % block_vlnv)
-        # create wrapper hdl (for rtlsim later on)
-        bd_base = "%s/%s.srcs/sources_1/bd/%s" % (
-            vivado_stitch_proj_dir,
-            prjname,
-            block_name,
-        )
-        bd_filename = "%s/%s.bd" % (bd_base, block_name)
-        tcl.append("make_wrapper -files [get_files %s] -top" % bd_filename)
-        wrapper_filename = "%s/hdl/%s_wrapper.v" % (bd_base, block_name)
-        tcl.append("add_files -norecurse %s" % wrapper_filename)
-        model.set_metadata_prop("wrapper_filename", wrapper_filename)
         # export list of used Verilog files (for rtlsim later on)
-        tcl.append("set all_v_files [get_files -filter {FILE_TYPE == Verilog}]")
+        tcl.append(
+            "set all_v_files [get_files -filter {FILE_TYPE == Verilog "
+            + "&& USED_IN_SYNTHESIS == 1} ]"
+        )
         v_file_list = "%s/all_verilog_srcs.txt" % vivado_stitch_proj_dir
         tcl.append("set fp [open %s w]" % v_file_list)
         # write each verilog filename to all_verilog_srcs.txt
diff --git a/src/finn/transformation/fpgadataflow/floorplan.py b/src/finn/transformation/fpgadataflow/floorplan.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d9a51875499d77f384c03f54009a9dd1001dea0
--- /dev/null
+++ b/src/finn/transformation/fpgadataflow/floorplan.py
@@ -0,0 +1,80 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from finn.custom_op.registry import getCustomOp
+from finn.transformation import Transformation
+from finn.util.basic import get_by_name
+
+
+class Floorplan(Transformation):
+    """Perform Floorplanning of the dataflow design. Separate DMAs into their own
+    partitions IDs, and TODO: split the design into sections of defined size"""
+
+    def __init__(self, limits=None):
+        super().__init__()
+        self.resource_limits = limits
+
+    def apply(self, model):
+        target_partition_id = 0
+        # we currently assume that all dataflow nodes belonging to the same partition
+        # are connected to each other and there is a single input/output to/from each.
+        all_nodes = list(model.graph.node)
+        df_nodes = list(
+            filter(lambda x: get_by_name(x.attribute, "backend") is not None, all_nodes)
+        )
+        dma_nodes = list(filter(lambda x: x.op_type == "IODMA", df_nodes))
+
+        non_dma_nodes = list(filter(lambda x: x not in dma_nodes, df_nodes))
+        dyn_tlastmarker_nodes = list(
+            filter(
+                lambda x: x.op_type == "TLastMarker"
+                and getCustomOp(x).get_nodeattr("DynIters") == "true",
+                non_dma_nodes,
+            )
+        )
+
+        non_dma_nodes = list(
+            filter(lambda x: x not in dyn_tlastmarker_nodes, non_dma_nodes)
+        )
+
+        for node in dma_nodes:
+            node_inst = getCustomOp(node)
+            node_inst.set_nodeattr("partition_id", target_partition_id)
+            target_partition_id += 1
+
+        for node in dyn_tlastmarker_nodes:
+            node_inst = getCustomOp(node)
+            node_inst.set_nodeattr("partition_id", target_partition_id)
+            target_partition_id += 1
+
+        for node in non_dma_nodes:
+            # TODO: implement proper floorplanning; for now just a single partition
+            node_inst = getCustomOp(node)
+            node_inst.set_nodeattr("partition_id", target_partition_id)
+
+        return (model, False)
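+
+
+# Usage sketch (illustrative; the surrounding build flow determines the exact
+# transformation order): assign partition IDs before splitting the graph, e.g.
+#   model = model.transform(InsertIODMA(64))
+#   model = model.transform(Floorplan())
+#   model = model.transform(CreateDataflowPartition())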
diff --git a/src/finn/transformation/fpgadataflow/hlssynth_ip.py b/src/finn/transformation/fpgadataflow/hlssynth_ip.py
index 6d7c4025653948d6958672177ae5e36ab08bf279..8315b6ec11e3d0bc5e2bf97e7c11817ae8b5a5b1 100644
--- a/src/finn/transformation/fpgadataflow/hlssynth_ip.py
+++ b/src/finn/transformation/fpgadataflow/hlssynth_ip.py
@@ -26,9 +26,11 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+import os
 import finn.custom_op.registry as registry
 from finn.util.fpgadataflow import is_fpgadataflow_node
 from finn.transformation import NodeLocalTransformation
+import warnings
 
 
 class HLSSynthIP(NodeLocalTransformation):
@@ -36,9 +38,11 @@ class HLSSynthIP(NodeLocalTransformation):
     that is referenced in node attribute "code_gen_dir_ipgen"
     and save path of generated project in node attribute "ipgen_path".
     All nodes in the graph must have the fpgadataflow backend attribute.
+    Any nodes that already have an ipgen_path attribute pointing to a valid path
+    will be skipped.
 
     This transformation calls Vivado HLS for synthesis, so it will run for
-    some time (several minutes)
+    some time (minutes to hours depending on configuration).
 
     * num_workers (int or None) number of parallel workers, see documentation in
       NodeLocalTransformation for more details.
@@ -59,8 +63,11 @@ class HLSSynthIP(NodeLocalTransformation):
                 ), """Node
                 attribute "code_gen_dir_ipgen" is empty. Please run
                 transformation PrepareIP first."""
-                # call the compilation function for this node
-                inst.ipgen_singlenode_code()
+                if not os.path.isdir(inst.get_nodeattr("ipgen_path")):
+                    # call the compilation function for this node
+                    inst.ipgen_singlenode_code()
+                else:
+                    warnings.warn("Using pre-existing IP for %s" % node.name)
                 # ensure that executable path is now set
                 assert (
                     inst.get_nodeattr("ipgen_path") != ""
diff --git a/src/finn/transformation/fpgadataflow/insert_fifo.py b/src/finn/transformation/fpgadataflow/insert_fifo.py
index b01f8cbe5c48db6c5288b2db1a8b009ea09ce6c0..6f7fde0c4faba09e584eb578819f44c18639bc9d 100644
--- a/src/finn/transformation/fpgadataflow/insert_fifo.py
+++ b/src/finn/transformation/fpgadataflow/insert_fifo.py
@@ -118,8 +118,11 @@ class InsertFIFO(Transformation):
                         graph_modified = True
 
         if graph_modified is False:
-            # insert FIFO as first node
-            if graph.node[0].op_type != "StreamingFIFO":
+            # insert FIFO as first node, except when first node is DMA
+            if (
+                graph.node[0].op_type != "StreamingFIFO"
+                and graph.node[0].op_type != "IODMA"
+            ):
                 n = graph.node[0]
                 n_input = n.input[0]
                 n0 = getCustomOp(n)
@@ -153,8 +156,11 @@ class InsertFIFO(Transformation):
                 # set fifo output tensor as new input tensor of second node
                 n.input[0] = fifo_output_tensor.name
 
-            # insert FIFO as last node
-            if graph.node[-1].op_type != "StreamingFIFO":
+            # insert FIFO as last node, except when last node is DMA
+            if (
+                graph.node[-1].op_type != "StreamingFIFO"
+                and graph.node[-1].op_type != "IODMA"
+            ):
                 n = graph.node[-1]
                 assert (
                     n.op_type != "TLastMarker"
diff --git a/src/finn/transformation/fpgadataflow/insert_iodma.py b/src/finn/transformation/fpgadataflow/insert_iodma.py
new file mode 100644
index 0000000000000000000000000000000000000000..72e5ec4fdd721ecf549adaf7ddd38db4636bce27
--- /dev/null
+++ b/src/finn/transformation/fpgadataflow/insert_iodma.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from onnx import TensorProto
+from onnx import helper as oh
+
+from finn.util.basic import get_by_name
+from finn.custom_op.registry import getCustomOp
+from finn.transformation import Transformation
+from finn.transformation.general import SortGraph
+import finn.core.data_layout as DataLayout
+import math
+import numpy as np
+
+
+class InsertIODMA(Transformation):
+    """Insert DMA nodes on all inputs and outputs."""
+
+    def __init__(self, max_intfwidth=32):
+        super().__init__()
+        assert (
+            2 ** math.log2(max_intfwidth) == max_intfwidth
+        ), "max_intfwidth must be a power of 2"
+        self.max_intfwidth = max_intfwidth
+
+    def apply(self, model):
+        # only makes sense for a pure fpgadataflow graph -- so we check!
+        all_nodes = list(model.graph.node)
+        assert all(
+            get_by_name(x.attribute, "backend").s.decode("UTF-8") == "fpgadataflow"
+            for x in all_nodes
+        )
+        # parse streamingfclayers looking for external weights with no attached IODMA
+        fc_extw_nodes = list(
+            filter(
+                lambda x: x.op_type == "StreamingFCLayer_Batch"
+                and get_by_name(x.attribute, "mem_mode") is not None
+                and get_by_name(x.attribute, "mem_mode").s.decode("UTF-8") == "external"
+                and model.find_producer(x.input[1]) is None,
+                all_nodes,
+            )
+        )
+        graph_in_name = model.graph.input[0].name
+        first_node = model.find_consumer(graph_in_name)
+        graph_out_name = model.graph.output[0].name
+        final_node = model.find_producer(graph_out_name)
+        if (
+            final_node.op_type == "IODMA"
+            and first_node.op_type == "IODMA"
+            and len(fc_extw_nodes) == 0
+        ):
+            # TODO maybe check the correctness of properties
+            return (model, False)
+        else:
+            if final_node.op_type != "IODMA":
+                # check if tensor is NHWC
+                assert (
+                    model.get_tensor_layout(graph_out_name) == DataLayout.NHWC
+                    or model.get_tensor_layout(graph_out_name) == DataLayout.NC
+                ), "Data layout of output tensor must be NHWC or NC"
+                out_shape = model.get_tensor_shape(graph_out_name)
+                out_dtype = model.get_tensor_datatype(graph_out_name)
+                # determine the feasible interface width
+                transfer_bits = np.prod(out_shape) * out_dtype.bitwidth()
+                intfwidth = math.gcd(transfer_bits, self.max_intfwidth)
+                assert (
+                    intfwidth % 8 == 0
+                ), "No feasible interface width for transfer size"
+                # get width of stream input to DMA
+                streamWidth = getCustomOp(final_node).get_outstream_width()
+                # make new buffer
+                final_node_out = oh.make_tensor_value_info(
+                    model.make_new_valueinfo_name(), TensorProto.FLOAT, out_shape
+                )
+                model.graph.value_info.append(final_node_out)
+                model.set_tensor_datatype(final_node_out.name, out_dtype)
+                # reroute final node output to final_node_out_name
+                final_node.output[0] = final_node_out.name
+                dma_node = oh.make_node(
+                    "IODMA",
+                    [final_node_out.name],
+                    [graph_out_name],
+                    numInputVectors=out_shape[:-1],
+                    NumChannels=out_shape[-1],
+                    dataType=str(out_dtype.name),
+                    intfWidth=intfwidth,
+                    streamWidth=streamWidth,
+                    direction="out",
+                    domain="finn",
+                    backend="fpgadataflow",
+                )
+                model.graph.node.append(dma_node)
+            if first_node.op_type != "IODMA":
+                # check if tensor is NHWC
+                assert (
+                    model.get_tensor_layout(graph_in_name) == DataLayout.NHWC
+                    or model.get_tensor_layout(graph_in_name) == DataLayout.NC
+                ), "Data layout of input tensor must be NHWC or NC"
+                in_shape = model.get_tensor_shape(graph_in_name)
+                in_dtype = model.get_tensor_datatype(graph_in_name)
+                # determine the feasible interface width
+                transfer_bits = np.prod(in_shape) * in_dtype.bitwidth()
+                intfwidth = math.gcd(transfer_bits, self.max_intfwidth)
+                assert (
+                    intfwidth % 8 == 0
+                ), "No feasible interface width for transfer size"
+                # get width of stream output from DMA
+                streamWidth = getCustomOp(first_node).get_instream_width()
+                # make new buffer
+                first_node_in = oh.make_tensor_value_info(
+                    model.make_new_valueinfo_name(), TensorProto.FLOAT, in_shape
+                )
+                model.graph.value_info.append(first_node_in)
+                model.set_tensor_datatype(first_node_in.name, in_dtype)
+                # reroute first node input to first_node_in
+                first_node.input[0] = first_node_in.name
+                dma_node = oh.make_node(
+                    "IODMA",
+                    [graph_in_name],
+                    [first_node_in.name],
+                    numInputVectors=in_shape[:-1],
+                    NumChannels=in_shape[-1],
+                    dataType=str(in_dtype.name),
+                    intfWidth=intfwidth,
+                    streamWidth=streamWidth,
+                    direction="in",
+                    domain="finn",
+                    backend="fpgadataflow",
+                )
+                model.graph.node.insert(0, dma_node)
+            for fc_node in fc_extw_nodes:
+                # check if tensor is NHWC
+                assert (
+                    model.get_tensor_layout(fc_node.input[1]) == DataLayout.NHWC
+                    or model.get_tensor_layout(fc_node.input[1]) == DataLayout.NC
+                ), "Data layout of tensors must be NHWC or NC"
+                fc_w_name = fc_node.input[1]
+                w_shape = model.get_tensor_shape(fc_w_name)
+                w_dtype = model.get_tensor_datatype(fc_w_name)
+                # determine the feasible interface width
+                transfer_bits = np.prod(w_shape) * w_dtype.bitwidth()
+                intfwidth = math.gcd(transfer_bits, self.max_intfwidth)
+                assert (
+                    intfwidth % 8 == 0
+                ), "No feasible interface width for transfer size"
+                # calculate width of stream output from DMA
+                pe = get_by_name(fc_node.attribute, "PE").i
+                simd = get_by_name(fc_node.attribute, "SIMD").i
+                assert pe * simd == w_shape[0], "Malformed weight matrix"
+                streamWidth = simd * pe * w_dtype.bitwidth()
+                # make new buffer
+                fc_node_in = oh.make_tensor_value_info(
+                    model.make_new_valueinfo_name(), TensorProto.FLOAT, w_shape
+                )
+                model.graph.value_info.append(fc_node_in)
+                model.set_tensor_datatype(fc_node_in.name, w_dtype)
+                model.set_initializer(fc_node_in.name, model.get_initializer(fc_w_name))
+                dma_node = oh.make_node(
+                    "IODMA",
+                    [fc_w_name],
+                    [fc_node_in.name],
+                    numInputVectors=[w_shape[1]],
+                    NumChannels=w_shape[0],
+                    dataType=str(w_dtype.name),
+                    intfWidth=intfwidth,
+                    streamWidth=streamWidth,
+                    direction="in",
+                    burstMode="wrap",
+                    domain="finn",
+                    backend="fpgadataflow",
+                )
+                fc_node.input[1] = fc_node_in.name
+                model.graph.node.insert(0, dma_node)
+            model = model.transform(SortGraph())
+            return (model, True)
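
Illustration (a minimal sketch, not part of the patch): the DMA interface width chosen above is the greatest common divisor of the total transfer size in bits and max_intfwidth, and must be byte-aligned. For a hypothetical (1, 32, 32, 64) INT8 output tensor with max_intfwidth=64:

    import math
    import numpy as np

    out_shape = (1, 32, 32, 64)       # hypothetical NHWC output shape
    bits_per_elem = 8                 # INT8
    transfer_bits = int(np.prod(out_shape)) * bits_per_elem  # 524288 bits
    intfwidth = math.gcd(transfer_bits, 64)                   # 64
    assert intfwidth % 8 == 0, "No feasible interface width for transfer size"

In the ZynqBuild flow later in this patch, the transformation is invoked as model.transform(InsertIODMA(64)).
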
diff --git a/src/finn/transformation/fpgadataflow/insert_tlastmarker.py b/src/finn/transformation/fpgadataflow/insert_tlastmarker.py
index 32f32ece585a93465ba32fede45d5eb606a2b0a3..bbb0e43fda464e919a7d8c9dcd25e08a49b33cec 100644
--- a/src/finn/transformation/fpgadataflow/insert_tlastmarker.py
+++ b/src/finn/transformation/fpgadataflow/insert_tlastmarker.py
@@ -31,23 +31,35 @@ from onnx import helper as oh
 
 from finn.custom_op.registry import getCustomOp
 from finn.transformation import Transformation
+from finn.util.basic import get_by_name
+
+import numpy as np
 
 
 class InsertTLastMarker(Transformation):
-    """Ensure that the graph is terminated with a TLastMarker node, inserting
-    one if necessary."""
+    """Ensure that the graph is started/terminated with a TLastMarker node, inserting
+    one if necessary.
+    Use the constructor arguments to select the type of TLastMarker to be inserted.
+    More information is available in the TLastMarker documentation.
+    """
 
-    def __init__(self):
+    def __init__(self, both=False, external=True, dynamic=True):
         super().__init__()
+        self.dyniters = dynamic
+        self.external = external
+        self.both = both
 
     def apply(self, model):
         # TODO only makes sense for a pure fpgadataflow graph -- check!
         graph_out_name = model.graph.output[0].name
         final_node = model.find_producer(graph_out_name)
-        if final_node.op_type == "TLastMarker":
-            # TODO maybe check the correctness of properties
-            return (model, False)
-        else:
+        graph_modified = False
+        if final_node.op_type != "TLastMarker" and not (
+            final_node.op_type == "IODMA"
+            and get_by_name(final_node.attribute, "direction").s.decode("UTF-8")
+            == "out"
+        ):
+
             custom_op = getCustomOp(final_node)
             num_iters = int(custom_op.get_number_output_values())
             stream_width = int(custom_op.get_outstream_width())
@@ -69,8 +81,88 @@ class InsertTLastMarker(Transformation):
                 NumIters=num_iters,
                 StreamWidth=stream_width,
                 ElemWidth=elem_width,
+                DynIters=(1 if self.dyniters else 0),
+                Direction="out",
+                Protocol=("external" if self.external else "internal"),
                 domain="finn",
                 backend="fpgadataflow",
             )
             model.graph.node.append(tlast_node)
-            return (model, True)
+            graph_modified = True
+        # if both is True, also insert marker on input
+        if self.both:
+            # detect and parse graph inputs
+            insert_idx = 0
+            graph_in_names = [x.name for x in model.graph.input]
+            for graph_in_name in graph_in_names:
+                first_node = model.find_consumers(graph_in_name)
+                # skip if no consumers (this may be the case for unused initializers)
+                # TODO: fix this with a cleanup transform
+                if first_node is None:
+                    continue
+                assert len(first_node) == 1, "Input fans out to multiple nodes"
+                first_node = first_node[0]
+                # several scenarios exclude the node:
+                # 1. node is a FC layer with internal weights, in which case
+                #    the input is in the list of graph inputs because it has an
+                #    initializer (TODO: fix this with a clean-up transform)
+                if (
+                    first_node.op_type == "StreamingFCLayer_Batch"
+                    and get_by_name(first_node.attribute, "mem_mode").s.decode("UTF-8")
+                    != "external"
+                ):
+                    continue
+                # 2. node is either a TLastMarker or an input IODMA
+                if first_node.op_type != "TLastMarker" and not (
+                    first_node.op_type == "IODMA"
+                    and get_by_name(first_node.attribute, "direction").s.decode("UTF-8")
+                    == "in"
+                ):
+
+                    custom_op = getCustomOp(first_node)
+                    num_iters = np.prod(custom_op.get_folded_input_shape()[1:-1])
+                    inp_idx = list(first_node.input).index(graph_in_name)
+                    if inp_idx > 0:
+                        if (
+                            first_node.op_type == "StreamingFCLayer_Batch"
+                            and inp_idx == 1
+                        ):
+                            stream_width = int(custom_op.get_weightstream_width())
+                        elif first_node.op_type == "AddStreams_Batch" and inp_idx == 1:
+                            stream_width = int(custom_op.get_instream_width())
+                        else:
+                            raise Exception("No method to determine stream width")
+                    else:
+                        stream_width = int(custom_op.get_instream_width())
+                    in_shape = model.get_tensor_shape(graph_in_name)
+                    in_dtype = model.get_tensor_datatype(graph_in_name)
+                    elem_width = in_dtype.bitwidth()
+                    # make new buffer
+                    first_node_in = oh.make_tensor_value_info(
+                        model.make_new_valueinfo_name(), TensorProto.FLOAT, in_shape
+                    )
+                    model.graph.value_info.append(first_node_in)
+                    model.set_tensor_datatype(first_node_in.name, in_dtype)
+                    ini = model.get_initializer(graph_in_name)
+                    # copy initializer if it exists
+                    if ini is not None:
+                        model.set_initializer(first_node_in.name, ini)
+                    # reroute first node input to first_node_in
+                    first_node.input[inp_idx] = first_node_in.name
+                    tlast_node = oh.make_node(
+                        "TLastMarker",
+                        [graph_in_name],
+                        [first_node_in.name],
+                        NumIters=num_iters,
+                        StreamWidth=stream_width,
+                        ElemWidth=elem_width,
+                        DynIters=(1 if self.dyniters else 0),
+                        Direction="in",
+                        Protocol=("external" if self.external else "internal"),
+                        domain="finn",
+                        backend="fpgadataflow",
+                    )
+                    model.graph.node.insert(insert_idx, tlast_node)
+                    graph_modified = True
+                    insert_idx += 1
+        return (model, graph_modified)
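
A hedged usage sketch of the extended constructor (the model filename is hypothetical): both=True also inserts a marker on the graph input, while external and dynamic map to the Protocol and DynIters node attributes set in apply() above.

    from finn.core.modelwrapper import ModelWrapper
    from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker

    model = ModelWrapper("dataflow_model.onnx")  # hypothetical file
    model = model.transform(InsertTLastMarker(both=True, external=True, dynamic=True))
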
diff --git a/src/finn/transformation/fpgadataflow/make_deployment.py b/src/finn/transformation/fpgadataflow/make_deployment.py
index a185f5392c4b5ec848cd463e02ebab4be9c56a46..2880e4aba20564f50a0acdff5e8c728714c84b5c 100644
--- a/src/finn/transformation/fpgadataflow/make_deployment.py
+++ b/src/finn/transformation/fpgadataflow/make_deployment.py
@@ -26,6 +26,7 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+import warnings
 import os
 import subprocess
 from distutils.dir_util import copy_tree
@@ -33,6 +34,7 @@ from shutil import copy
 
 from finn.transformation import Transformation
 from finn.util.basic import make_build_dir
+import finn.transformation.fpgadataflow.templates as templates
 
 
 class DeployToPYNQ(Transformation):
@@ -64,35 +66,54 @@ class DeployToPYNQ(Transformation):
 
         # get and copy necessary files
         # .bit and .hwh file
-        vivado_pynq_proj = model.get_metadata_prop("vivado_pynq_proj")
-        for file in os.listdir(vivado_pynq_proj):
-            if file.endswith(".bit"):
-                bitfile = os.path.join(vivado_pynq_proj, file)
-            elif file.endswith(".hwh"):
-                hwhfile = os.path.join(vivado_pynq_proj, file)
-        copy(bitfile, deployment_dir)
-        copy(hwhfile, deployment_dir)
+        bitfile = model.get_metadata_prop("bitfile")
+        hwh_file = model.get_metadata_prop("hw_handoff")
+        deploy_files = [bitfile, hwh_file]
+
+        for dfile in deploy_files:
+            if dfile is not None:
+                copy(dfile, deployment_dir)
+
+        # helper script for Alveo
+        platform = model.get_metadata_prop("platform")
+        if platform == "alveo":
+            alveo_run_sh = templates.alveo_run_sh_template
+            fill_dict = {
+                "$REMOTE_DEPLOY_DIR$": self.target_dir
+                + "/"
+                + os.path.basename(deployment_dir),
+                "$CONDA_ENV_NAME$": "finn-pynq-alveo",
+                "$REMOTE_XRT$": os.environ["XILINX_XRT"],
+                "$REMOTE_PLATFORM_REPO_PATHS$": os.environ["PLATFORM_REPO_PATHS"],
+                "$BITFILE$": os.path.basename(bitfile),
+            }
+            for key, value in fill_dict.items():
+                alveo_run_sh = alveo_run_sh.replace(key, value)
+            alveo_run_sh_path = deployment_dir + "/alveo_run.sh"
+            with open(alveo_run_sh_path, "w") as f:
+                f.write(alveo_run_sh)
 
         # driver.py and python libraries
         pynq_driver_dir = model.get_metadata_prop("pynq_driver_dir")
         copy_tree(pynq_driver_dir, deployment_dir)
         model.set_metadata_prop("pynq_deploy_dir", deployment_dir)
         model.set_metadata_prop("exec_mode", "remote_pynq")
+        if self.password == "":
+            prefix = ""  # assume we are using an ssh key
+            warnings.warn("Empty password, make sure you've set up an ssh key")
+        else:
+            prefix = "sshpass -p %s " % self.password
+
         # create target directory on PYNQ board
-        cmd = 'sshpass -p {} ssh {}@{} -p {} "mkdir -p {}"'.format(
-            self.password, self.username, self.ip, self.port, self.target_dir
+        cmd = prefix + 'ssh {}@{} -p {} "mkdir -p {}"'.format(
+            self.username, self.ip, self.port, self.target_dir
         )
         bash_command = ["/bin/bash", "-c", cmd]
         process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
         process_compile.communicate()
         # copy directory to PYNQ board using scp and sshpass
-        cmd = "sshpass -p {} scp -P{} -r {} {}@{}:{}".format(
-            self.password,
-            self.port,
-            deployment_dir,
-            self.username,
-            self.ip,
-            self.target_dir,
+        cmd = prefix + "scp -P{} -r {} {}@{}:{}".format(
+            self.port, deployment_dir, self.username, self.ip, self.target_dir,
         )
         bash_command = ["/bin/bash", "-c", cmd]
         process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
diff --git a/src/finn/transformation/fpgadataflow/make_pynq_driver.py b/src/finn/transformation/fpgadataflow/make_pynq_driver.py
index 049ede5064d252bd6391184c4227e5367a8c1e2b..813b40698d1beec54e6ba3fa5344a8d0bb715a00 100644
--- a/src/finn/transformation/fpgadataflow/make_pynq_driver.py
+++ b/src/finn/transformation/fpgadataflow/make_pynq_driver.py
@@ -26,10 +26,8 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-import os
-import shutil
 
-from finn.custom_op.registry import getCustomOp
+import shutil
 from finn.transformation import Transformation
 from finn.util.basic import gen_finn_dt_tensor, get_finn_root, make_build_dir
 from finn.util.data_packing import finnpy_to_packed_bytearray
@@ -42,19 +40,18 @@ class MakePYNQDriver(Transformation):
     accelerator, including data packing/unpacking. The MakePYNQProject
     transformation must have been already applied.
 
+    platform: one of ["zynq-iodma", "alveo"]
+
     Outcome if successful: sets the pynq_driver_dir attribute in the ONNX
     ModelProto's metadata_props field, with the created driver dir as the
     value.
     """
 
-    def __init__(self):
+    def __init__(self, platform):
         super().__init__()
+        self.platform = platform
 
     def apply(self, model):
-        vivado_pynq_proj = model.get_metadata_prop("vivado_pynq_proj")
-        if vivado_pynq_proj is None or (not os.path.isdir(vivado_pynq_proj)):
-            raise Exception("No PYNQ project found, apply MakePYNQProject first.")
-
         # create a temporary folder for the generated driver
         pynq_driver_dir = make_build_dir(prefix="pynq_driver_")
         model.set_metadata_prop("pynq_driver_dir", pynq_driver_dir)
@@ -67,11 +64,16 @@ class MakePYNQDriver(Transformation):
         o_tensor_shape_normal = tuple(model.get_tensor_shape(o_tensor_name))
         i_tensor_dt = model.get_tensor_datatype(i_tensor_name)
         o_tensor_dt = model.get_tensor_datatype(o_tensor_name)
-        # extract HLSCustomOp instances to get folded i/o shapes
-        first_node = getCustomOp(model.find_consumer(i_tensor_name))
-        last_node = getCustomOp(model.find_producer(o_tensor_name))
-        i_tensor_shape_folded = tuple(first_node.get_folded_input_shape())
-        o_tensor_shape_folded = tuple(last_node.get_folded_output_shape())
+        # folded shapes for i/o are simply derived from the regular tensor shapes;
+        # this used to be extracted from the first/last node folded shapes, but
+        # that is no longer possible due to the IODMAs
+        i_tensor_shape_folded = list(i_tensor_shape_normal)
+        i_tensor_shape_folded.insert(-1, 1)
+        i_tensor_shape_folded = tuple(i_tensor_shape_folded)
+        o_tensor_shape_folded = list(o_tensor_shape_normal)
+        o_tensor_shape_folded.insert(-1, 1)
+        o_tensor_shape_folded = tuple(o_tensor_shape_folded)
+
         # generate dummy folded i/o tensors and their packed versions
         i_tensor_dummy_folded = gen_finn_dt_tensor(i_tensor_dt, i_tensor_shape_folded)
         o_tensor_dummy_folded = gen_finn_dt_tensor(o_tensor_dt, o_tensor_shape_folded)
@@ -98,6 +100,7 @@ class MakePYNQDriver(Transformation):
             ret = ret.replace("[1,", "[%s," % batch_var_name)
             return ret
 
+        driver = driver.replace("$PLATFORM$", self.platform)
         driver = driver.replace("$INPUT_FINN_DATATYPE$", str(i_tensor_dt))
         driver = driver.replace("$INPUT_SHAPE_NORMAL$", mss(i_tensor_shape_normal))
         driver = driver.replace("$INPUT_SHAPE_FOLDED$", mss(i_tensor_shape_folded))
@@ -107,8 +110,27 @@ class MakePYNQDriver(Transformation):
         driver = driver.replace("$OUTPUT_SHAPE_FOLDED$", mss(o_tensor_shape_folded))
         driver = driver.replace("$OUTPUT_SHAPE_PACKED$", mss(o_tensor_shape_packed))
 
+        # clock settings for driver
+        clk_ns = model.get_metadata_prop("clk_ns")
+        # default to 10ns / 100 MHz if property not set
+        if clk_ns is None:
+            clk_ns = 10.0
+        else:
+            clk_ns = float(clk_ns)
+        fclk_mhz = 1 / (clk_ns * 0.001)
+        # TODO change according to PYNQ board?
+        driver = driver.replace("$CLK_NAME$", "fclk0_mhz")
+        driver = driver.replace("$CLOCK_FREQ_MHZ$", str(fclk_mhz))
+
         with open(driver_py, "w") as f:
             f.write(driver)
+
+        # add validate.py to run full top-1 test (only for suitable networks)
+        validate_py = pynq_driver_dir + "/validate.py"
+        validate_src = templates.pynq_validation_template
+        with open(validate_py, "w") as f:
+            f.write(validate_src)
+
         # copy all the dependencies into the driver folder
         shutil.copytree(
             get_finn_root() + "/src/finn/util", pynq_driver_dir + "/finn/util"
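
To illustrate the folded-shape and clock computations above (sketch only, hypothetical tensor shape): the folded i/o shape is the normal shape with a 1 inserted before the last dimension, and the driver clock is derived from the clk_ns metadata property.

    i_tensor_shape_normal = (1, 784)       # hypothetical input shape
    folded = list(i_tensor_shape_normal)
    folded.insert(-1, 1)
    i_tensor_shape_folded = tuple(folded)  # (1, 1, 784)

    clk_ns = 10.0                          # default when the clk_ns property is unset
    fclk_mhz = 1 / (clk_ns * 0.001)        # 100.0 MHz
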
diff --git a/src/finn/transformation/fpgadataflow/make_pynq_proj.py b/src/finn/transformation/fpgadataflow/make_pynq_proj.py
deleted file mode 100644
index 429b74bb5ea7e359ea720a0a86706f2c653ee6ce..0000000000000000000000000000000000000000
--- a/src/finn/transformation/fpgadataflow/make_pynq_proj.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Copyright (c) 2020, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import os
-import subprocess
-
-from finn.custom_op.registry import getCustomOp
-from finn.transformation import Transformation
-from finn.util.basic import get_by_name, make_build_dir, roundup_to_integer_multiple
-
-from . import templates
-
-
-class MakePYNQProject(Transformation):
-    """Create a Vivado PYNQ overlay project (including the shell infrastructure)
-    from the already-stitched IP block for this graph.
-    All nodes in the graph must have the fpgadataflow backend attribute,
-    and the CreateStitchedIP transformation must have been previously run on
-    the graph.
-
-    Outcome if successful: sets the vivado_pynq_proj attribute in the ONNX
-    ModelProto's metadata_props field, with the created project dir as the
-    value.
-    """
-
-    def __init__(self, platform):
-        super().__init__()
-        self.platform = platform
-
-    def apply(self, model):
-        pynq_shell_path = os.environ["PYNQSHELL_PATH"]
-        if not os.path.isdir(pynq_shell_path):
-            raise Exception("Ensure the PYNQ-HelloWorld utility repo is cloned.")
-        ipstitch_path = model.get_metadata_prop("vivado_stitch_proj")
-        if ipstitch_path is None or (not os.path.isdir(ipstitch_path)):
-            raise Exception(
-                "No stitched IPI design found, apply CreateStitchedIP first."
-            )
-        vivado_stitch_vlnv = model.get_metadata_prop("vivado_stitch_vlnv")
-        if vivado_stitch_vlnv is None:
-            raise Exception(
-                "No vlnv for stitched IP found, apply CreateStitchedIP first."
-            )
-
-        # collect list of all IP dirs
-        ip_dirs = ["list"]
-        for node in model.graph.node:
-            ip_dir_attribute = get_by_name(node.attribute, "ip_path")
-            assert (
-                ip_dir_attribute is not None
-            ), """Node attribute "ip_path" is
-            empty. Please run transformation HLSSynth_ipgen first."""
-            ip_dir_value = ip_dir_attribute.s.decode("UTF-8")
-            assert os.path.isdir(
-                ip_dir_value
-            ), """The directory that should
-            contain the generated ip blocks doesn't exist."""
-            ip_dirs += [ip_dir_value]
-        ip_dirs += [ipstitch_path + "/ip"]
-        ip_dirs_str = "[%s]" % (" ".join(ip_dirs))
-
-        # extract HLSCustomOp instances to get i/o stream widths
-        i_tensor_name = model.graph.input[0].name
-        o_tensor_name = model.graph.output[0].name
-        first_node = getCustomOp(model.find_consumer(i_tensor_name))
-        last_node = getCustomOp(model.find_producer(o_tensor_name))
-        i_bits_per_cycle = first_node.get_instream_width()
-        o_bits_per_cycle = last_node.get_outstream_width()
-        # ensure i/o is padded to bytes
-        i_bits_per_cycle_padded = roundup_to_integer_multiple(i_bits_per_cycle, 8)
-        o_bits_per_cycle_padded = roundup_to_integer_multiple(o_bits_per_cycle, 8)
-        assert (
-            i_bits_per_cycle_padded % 8 == 0
-        ), """Padded input bits are not a
-        multiple of 8."""
-        assert (
-            o_bits_per_cycle_padded % 8 == 0
-        ), """Padded output bits are not a
-        multiple of 8."""
-        in_bytes = i_bits_per_cycle_padded / 8
-        out_bytes = o_bits_per_cycle_padded / 8
-        in_if_name = "in0_V_V_0"
-        out_if_name = "out_r_0"
-        clk_name = "ap_clk_0"
-        nrst_name = "ap_rst_n_0"
-        axi_lite_if_name = "s_axi_control_0"
-        vivado_ip_cache = os.getenv("VIVADO_IP_CACHE", default="")
-        # TODO get from Transformation arg or metadata_prop
-        fclk_mhz = 100.0
-
-        # create a temporary folder for the project
-        vivado_pynq_proj_dir = make_build_dir(prefix="vivado_pynq_proj_")
-        model.set_metadata_prop("vivado_pynq_proj", vivado_pynq_proj_dir)
-        # filename for the synth utilization report
-        synth_report_filename = vivado_pynq_proj_dir + "/synth_report.xml"
-        model.set_metadata_prop("vivado_synth_rpt", synth_report_filename)
-
-        ip_config_tcl = templates.ip_config_tcl_template % (
-            vivado_pynq_proj_dir,
-            ip_dirs_str,
-            vivado_pynq_proj_dir,
-            synth_report_filename,
-            vivado_stitch_vlnv,
-            in_bytes,
-            out_bytes,
-            in_if_name,
-            out_if_name,
-            clk_name,
-            nrst_name,
-            axi_lite_if_name,
-            vivado_ip_cache,
-            fclk_mhz,
-        )
-
-        with open(vivado_pynq_proj_dir + "/ip_config.tcl", "w") as f:
-            f.write(ip_config_tcl)
-        # create a shell script for project creation and synthesis
-        make_project_sh = vivado_pynq_proj_dir + "/make_project.sh"
-        working_dir = os.environ["PWD"]
-        ipcfg = vivado_pynq_proj_dir + "/ip_config.tcl"
-        with open(make_project_sh, "w") as f:
-            f.write(
-                templates.call_pynqshell_makefile_template
-                % (pynq_shell_path, self.platform, ipcfg, "block_design", working_dir)
-            )
-        synth_project_sh = vivado_pynq_proj_dir + "/synth_project.sh"
-        with open(synth_project_sh, "w") as f:
-            f.write(
-                templates.call_pynqshell_makefile_template
-                % (pynq_shell_path, self.platform, ipcfg, "bitstream", working_dir)
-            )
-        # call the project creation script
-        # synthesis script will be called with a separate transformation
-        bash_command = ["bash", make_project_sh]
-        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-        process_compile.communicate()
-        return (model, False)
diff --git a/src/finn/transformation/fpgadataflow/make_zynq_proj.py b/src/finn/transformation/fpgadataflow/make_zynq_proj.py
new file mode 100644
index 0000000000000000000000000000000000000000..e263c450af5d2dca09a79b7757a0c0d67bdf86ff
--- /dev/null
+++ b/src/finn/transformation/fpgadataflow/make_zynq_proj.py
@@ -0,0 +1,317 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import subprocess
+
+from finn.custom_op.registry import getCustomOp
+from finn.transformation import Transformation
+from finn.core.modelwrapper import ModelWrapper
+from finn.util.basic import get_by_name, make_build_dir
+from finn.util.basic import get_num_default_workers
+from finn.util.basic import pynq_part_map
+
+from finn.transformation.fpgadataflow.create_dataflow_partition import (
+    CreateDataflowPartition,
+)
+from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
+from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO
+from finn.transformation.fpgadataflow.insert_iodma import InsertIODMA
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
+from finn.transformation.fpgadataflow.floorplan import Floorplan
+from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from shutil import copy
+from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
+
+from . import templates
+
+
+def collect_ip_dirs(model, ipstitch_path):
+    # collect list of all IP dirs
+    ip_dirs = []
+    for node in model.graph.node:
+        ip_dir_attribute = get_by_name(node.attribute, "ip_path")
+        assert (
+            ip_dir_attribute is not None
+        ), """Node attribute "ip_path" is
+        empty. Please run transformation HLSSynth_ipgen first."""
+        ip_dir_value = ip_dir_attribute.s.decode("UTF-8")
+        assert os.path.isdir(
+            ip_dir_value
+        ), """The directory that should
+        contain the generated ip blocks doesn't exist."""
+        ip_dirs += [ip_dir_value]
+    ip_dirs += [ipstitch_path + "/ip"]
+    return ip_dirs
+
+
+class MakeZYNQProject(Transformation):
+    """Create a Vivado overlay project (including the shell infrastructure)
+    from the already-stitched IP block for this graph.
+    All nodes in the graph must have the fpgadataflow backend attribute,
+    and the CreateStitchedIP transformation must have been previously run on
+    the graph. This is functionally equivalent to MakePYNQProject, but does
+    not use the PYNQ shell infrastructure and instead creates a fully custom
+    block design. However, this transform requires DMAs in the accelerator design.
+
+    Outcome if successful: sets the vivado_pynq_proj attribute in the ONNX
+    ModelProto's metadata_props field, with the created project dir as the
+    value.
+    """
+
+    def __init__(self, platform, enable_debug=False):
+        super().__init__()
+        self.platform = platform
+        self.enable_debug = 1 if enable_debug else 0
+
+    def apply(self, model):
+
+        # create a config file and empty list of xo files
+        config = []
+        idma_idx = 0
+        odma_idx = 0
+        aximm_idx = 0
+        axilite_idx = 0
+        global_clk_ns = 0
+        instance_names = {}
+        for node in model.graph.node:
+            assert node.op_type == "StreamingDataflowPartition", "Invalid link graph"
+            sdp_node = getCustomOp(node)
+            dataflow_model_filename = sdp_node.get_nodeattr("model")
+            kernel_model = ModelWrapper(dataflow_model_filename)
+
+            ipstitch_path = kernel_model.get_metadata_prop("vivado_stitch_proj")
+            if ipstitch_path is None or (not os.path.isdir(ipstitch_path)):
+                raise Exception(
+                    "No stitched IPI design found for %s, apply CreateStitchedIP first."
+                    % node.name
+                )
+
+            vivado_stitch_vlnv = kernel_model.get_metadata_prop("vivado_stitch_vlnv")
+            if vivado_stitch_vlnv is None:
+                raise Exception(
+                    "No vlnv found for %s, apply CreateStitchedIP first." % node.name
+                )
+
+            ip_dirs = ["list"]
+            ip_dirs += collect_ip_dirs(kernel_model, ipstitch_path)
+            ip_dirs_str = "[%s]" % (" ".join(ip_dirs))
+            config.append(
+                "set_property ip_repo_paths "
+                "[concat [get_property ip_repo_paths [current_project]] %s] "
+                "[current_project]" % ip_dirs_str
+            )
+            config.append("update_ip_catalog -rebuild -scan_changes")
+
+            # get metadata property clk_ns to calculate clock frequency
+            clk_ns = float(kernel_model.get_metadata_prop("clk_ns"))
+            if clk_ns > global_clk_ns:
+                global_clk_ns = clk_ns
+
+            # gather info on connectivity
+            # assume each node connected to outputs/inputs is DMA:
+            # has axis, aximm and axilite
+            # everything else is axis-only
+            # assume only one connection from each ip to the next
+            # all aximm allocated to DDR[0]
+            # all kernels allocated to SLR0
+            producer = model.find_producer(node.input[0])
+            consumer = model.find_consumers(node.output[0])
+            # define kernel instances
+            # name kernels connected to graph inputs as idmaxx
+            # name kernels connected to graph outputs as odmaxx
+            if producer is None or consumer is None:
+                if producer is None:
+                    instance_names[node.name] = "idma" + str(idma_idx)
+                elif consumer is None:
+                    instance_names[node.name] = "odma" + str(odma_idx)
+                config.append(
+                    "create_bd_cell -type ip -vlnv %s %s"
+                    % (vivado_stitch_vlnv, instance_names[node.name])
+                )
+                config.append(
+                    "connect_bd_intf_net [get_bd_intf_pins %s/m_axi_gmem0] "
+                    "[get_bd_intf_pins smartconnect_0/S%02d_AXI]"
+                    % (instance_names[node.name], aximm_idx)
+                )
+                config.append(
+                    "connect_bd_intf_net [get_bd_intf_pins %s/s_axi_control] "
+                    "[get_bd_intf_pins axi_interconnect_0/M%02d_AXI]"
+                    % (instance_names[node.name], axilite_idx)
+                )
+                idma_idx += 1
+                aximm_idx += 1
+                axilite_idx += 1
+            else:
+                instance_names[node.name] = node.name
+                config.append(
+                    "create_bd_cell -type ip -vlnv %s %s"
+                    % (vivado_stitch_vlnv, instance_names[node.name])
+                )
+            config.append(
+                "connect_bd_net [get_bd_pins %s/ap_clk] "
+                "[get_bd_pins smartconnect_0/aclk]" % instance_names[node.name]
+            )
+            config.append(
+                "connect_bd_net [get_bd_pins %s/ap_rst_n] "
+                "[get_bd_pins smartconnect_0/aresetn]" % instance_names[node.name]
+            )
+            # connect streams
+            if producer is not None:
+                for i in range(len(node.input)):
+                    producer = model.find_producer(node.input[i])
+                    if producer is not None:
+                        j = list(producer.output).index(node.input[i])
+                        config.append(
+                            "connect_bd_intf_net [get_bd_intf_pins %s/s_axis_%d] "
+                            "[get_bd_intf_pins %s/m_axis_%d]"
+                            % (
+                                instance_names[node.name],
+                                i,
+                                instance_names[producer.name],
+                                j,
+                            )
+                        )
+
+        # create a temporary folder for the project
+        vivado_pynq_proj_dir = make_build_dir(prefix="vivado_zynq_proj_")
+        model.set_metadata_prop("vivado_pynq_proj", vivado_pynq_proj_dir)
+
+        fclk_mhz = int(1 / (global_clk_ns * 0.001))
+
+        # create a TCL recipe for the project
+        ipcfg = vivado_pynq_proj_dir + "/ip_config.tcl"
+        config = "\n".join(config) + "\n"
+        with open(ipcfg, "w") as f:
+            f.write(
+                templates.custom_zynq_shell_template
+                % (
+                    fclk_mhz,
+                    axilite_idx,
+                    aximm_idx,
+                    self.platform,
+                    pynq_part_map[self.platform],
+                    config,
+                    self.enable_debug,
+                    get_num_default_workers(),
+                )
+            )
+
+        # create a TCL recipe for the project
+        synth_project_sh = vivado_pynq_proj_dir + "/synth_project.sh"
+        working_dir = os.environ["PWD"]
+        with open(synth_project_sh, "w") as f:
+            f.write("#!/bin/bash \n")
+            f.write("cd {}\n".format(vivado_pynq_proj_dir))
+            f.write("vivado -mode tcl -source %s\n" % ipcfg)
+            f.write("cd {}\n".format(working_dir))
+
+        # call the synthesis script
+        bash_command = ["bash", synth_project_sh]
+        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
+        process_compile.communicate()
+        bitfile_name = (
+            vivado_pynq_proj_dir + "/finn_zynq_link.runs/impl_1/top_wrapper.bit"
+        )
+        if not os.path.isfile(bitfile_name):
+            raise Exception("Synthesis failed, no bitfile found")
+        deploy_bitfile_name = vivado_pynq_proj_dir + "/resizer.bit"
+        copy(bitfile_name, deploy_bitfile_name)
+        # set bitfile attribute
+        model.set_metadata_prop("bitfile", deploy_bitfile_name)
+        hwh_name = (
+            vivado_pynq_proj_dir
+            + "/finn_zynq_link.srcs/sources_1/bd/top/hw_handoff/top.hwh"
+        )
+        if not os.path.isfile(hwh_name):
+            raise Exception("Synthesis failed, no hardware handoff file found")
+        deploy_hwh_name = vivado_pynq_proj_dir + "/resizer.hwh"
+        copy(hwh_name, deploy_hwh_name)
+        model.set_metadata_prop("hw_handoff", deploy_hwh_name)
+        # filename for the synth utilization report
+        synth_report_filename = vivado_pynq_proj_dir + "/synth_report.xml"
+        model.set_metadata_prop("vivado_synth_rpt", synth_report_filename)
+        return (model, False)
+
+
+class ZynqBuild(Transformation):
+    """Best-effort attempt at building the accelerator for Zynq."""
+
+    def __init__(self, platform, period_ns, enable_debug=False):
+        super().__init__()
+        self.fpga_part = pynq_part_map[platform]
+        self.period_ns = period_ns
+        self.platform = platform
+        self.enable_debug = enable_debug
+
+    def apply(self, model):
+        # first infer layouts
+        model = model.transform(InferDataLayouts())
+        # prepare at global level, then break up into kernels
+        prep_transforms = [
+            MakePYNQDriver(platform="zynq-iodma"),
+            InsertIODMA(64),
+            InsertDWC(),
+            Floorplan(),
+            CreateDataflowPartition(),
+        ]
+        for trn in prep_transforms:
+            model = model.transform(trn)
+            model = model.transform(GiveUniqueNodeNames())
+            model = model.transform(GiveReadableTensorNames())
+        # Build each kernel individually
+        sdp_nodes = model.get_nodes_by_op_type("StreamingDataflowPartition")
+        for sdp_node in sdp_nodes:
+            prefix = sdp_node.name + "_"
+            sdp_node = getCustomOp(sdp_node)
+            dataflow_model_filename = sdp_node.get_nodeattr("model")
+            kernel_model = ModelWrapper(dataflow_model_filename)
+            kernel_model = kernel_model.transform(InsertFIFO())
+            kernel_model = kernel_model.transform(GiveUniqueNodeNames(prefix))
+            kernel_model.save(dataflow_model_filename)
+            kernel_model = kernel_model.transform(
+                PrepareIP(self.fpga_part, self.period_ns)
+            )
+            kernel_model = kernel_model.transform(HLSSynthIP())
+            kernel_model = kernel_model.transform(
+                CreateStitchedIP(
+                    self.fpga_part, self.period_ns, sdp_node.onnx_node.name, True
+                )
+            )
+            kernel_model.set_metadata_prop("platform", "zynq-iodma")
+            kernel_model.save(dataflow_model_filename)
+        # Assemble design from IPs
+        model = model.transform(
+            MakeZYNQProject(self.platform, enable_debug=self.enable_debug)
+        )
+        # set platform attribute for correct remote execution
+        model.set_metadata_prop("platform", "zynq-iodma")
+        return (model, False)
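
A hedged end-to-end usage sketch for the new ZynqBuild transformation (board name and model filename are placeholders):

    from finn.core.modelwrapper import ModelWrapper
    from finn.transformation.fpgadataflow.make_zynq_proj import ZynqBuild

    model = ModelWrapper("hls_layers_model.onnx")            # hypothetical file
    model = model.transform(ZynqBuild(platform="Pynq-Z1", period_ns=10))
    print(model.get_metadata_prop("bitfile"))                # deployable bitfile path
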
diff --git a/src/finn/transformation/fpgadataflow/synth_pynq_proj.py b/src/finn/transformation/fpgadataflow/minimize_accumulator_width.py
similarity index 61%
rename from src/finn/transformation/fpgadataflow/synth_pynq_proj.py
rename to src/finn/transformation/fpgadataflow/minimize_accumulator_width.py
index d7f73a7fe3dfcd0fef314304fe939623e577ac20..2c54a5efbd3b28f0fbfd074b512929edab234e78 100644
--- a/src/finn/transformation/fpgadataflow/synth_pynq_proj.py
+++ b/src/finn/transformation/fpgadataflow/minimize_accumulator_width.py
@@ -26,32 +26,23 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-import os
-import subprocess
-
+from finn.custom_op.registry import getCustomOp
 from finn.transformation import Transformation
+from finn.util.fpgadataflow import is_fpgadataflow_node
 
 
-class SynthPYNQProject(Transformation):
-    """Run synthesis for the PYNQ project for this graph. The MakePYNQProject
-    transformation must be applied prior to this transformation."""
+class MinimizeAccumulatorWidth(Transformation):
+    """For relevant nodes, call the accumulator width minimization
+    functions to save on resources. May alter tensor DataType for
+    certain nodes if they produce an accumulator as result."""
 
     def __init__(self):
         super().__init__()
 
     def apply(self, model):
-        vivado_pynq_proj_dir = model.get_metadata_prop("vivado_pynq_proj")
-        if vivado_pynq_proj_dir is None or (not os.path.isdir(vivado_pynq_proj_dir)):
-            raise Exception("No synthesis project, apply MakePYNQProject first.")
-        synth_project_sh = vivado_pynq_proj_dir + "/synth_project.sh"
-        if not os.path.isfile(synth_project_sh):
-            raise Exception("No synthesis script, apply MakePYNQProject first.")
-        bash_command = ["bash", synth_project_sh]
-        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-        process_compile.communicate()
-        # set bitfile attribute
-        model.set_metadata_prop(
-            "vivado_pynq_bitfile", vivado_pynq_proj_dir + "/resizer.bit"
-        )
-        # TODO pull out synthesis statistics and put them in as attributes
+        for node in model.graph.node:
+            if is_fpgadataflow_node(node) is True:
+                inst = getCustomOp(node)
+                if hasattr(inst, "minimize_accumulator_width"):
+                    inst.minimize_accumulator_width(model)
         return (model, False)
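
A minimal usage sketch of the renamed transformation (model filename hypothetical); nodes that implement minimize_accumulator_width have their accumulator DataTypes narrowed in place.

    from finn.core.modelwrapper import ModelWrapper
    from finn.transformation.fpgadataflow.minimize_accumulator_width import (
        MinimizeAccumulatorWidth,
    )

    model = ModelWrapper("hls_layers_model.onnx")   # hypothetical file
    model = model.transform(MinimizeAccumulatorWidth())
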
diff --git a/src/finn/transformation/fpgadataflow/prepare_cppsim.py b/src/finn/transformation/fpgadataflow/prepare_cppsim.py
index a1524322ec03a4e96ef41f999144e3eed349c5af..6eae560e1191642cfaf85d92c6d0fcf644630973 100644
--- a/src/finn/transformation/fpgadataflow/prepare_cppsim.py
+++ b/src/finn/transformation/fpgadataflow/prepare_cppsim.py
@@ -29,9 +29,12 @@
 import os
 
 import finn.custom_op.registry as registry
-from finn.transformation import Transformation
 from finn.util.basic import make_build_dir
 from finn.util.fpgadataflow import is_fpgadataflow_node
+from finn.transformation import Transformation
+from finn.util.basic import get_num_default_workers
+import multiprocessing as mp
+import copy
 
 
 def _codegen_single_node(node, model):
@@ -66,8 +69,39 @@ class PrepareCppSim(Transformation):
     that contains generated C++ code that can be used to simulate the node using cppsim.
     The subsequent transformation is CompileCppSim"""
 
+    def __init__(self, num_workers=None):
+        super().__init__()
+        if num_workers is None:
+            self._num_workers = get_num_default_workers()
+        else:
+            self._num_workers = num_workers
+        assert self._num_workers >= 0, "Number of workers must be nonnegative."
+        if self._num_workers == 0:
+            self._num_workers = mp.cpu_count()
+
+    def prepareCppSim_node(self, node):
+        if is_fpgadataflow_node(node) is True:
+            _codegen_single_node(node, self.model)
+        return (node, False)
+
     def apply(self, model):
-        for node in model.graph.node:
-            if is_fpgadataflow_node(node) is True:
-                _codegen_single_node(node, model)
-        return (model, False)
+        # Remove old nodes from the current model
+        self.model = copy.deepcopy(model)
+        old_nodes = []
+        for i in range(len(model.graph.node)):
+            old_nodes.append(model.graph.node.pop())
+
+        # Execute transformation in parallel
+        with mp.Pool(self._num_workers) as p:
+            new_nodes_and_bool = p.map(self.prepareCppSim_node, old_nodes, chunksize=1)
+
+        # extract nodes and check if the transformation needs to run again
+        # Note: .pop() had initially reversed the node order
+        run_again = False
+        for node, run in reversed(new_nodes_and_bool):
+            # Reattach new nodes to old model
+            model.graph.node.append(node)
+            if run is True:
+                run_again = True
+
+        return (model, run_again)
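
A hedged usage example of the new num_workers argument (model filename hypothetical): None falls back to get_num_default_workers(), and 0 selects mp.cpu_count() workers.

    from finn.core.modelwrapper import ModelWrapper
    from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim

    model = ModelWrapper("hls_layers_model.onnx")           # hypothetical file
    model = model.transform(PrepareCppSim())                 # default worker count
    model = model.transform(PrepareCppSim(num_workers=0))    # use all available cores
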
diff --git a/src/finn/transformation/fpgadataflow/prepare_ip.py b/src/finn/transformation/fpgadataflow/prepare_ip.py
index 00182773558ec30ab0271de6599615233785bdd7..21f8e0052d5f2d60f11f33846b483d3f556d1188 100644
--- a/src/finn/transformation/fpgadataflow/prepare_ip.py
+++ b/src/finn/transformation/fpgadataflow/prepare_ip.py
@@ -27,11 +27,11 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import os
-
 import finn.custom_op.registry as registry
 from finn.transformation import Transformation
 from finn.util.basic import make_build_dir
 from finn.util.fpgadataflow import is_fpgadataflow_node
+import warnings
 
 
 def _codegen_single_node(node, model, fpgapart, clk):
@@ -50,8 +50,10 @@ def _codegen_single_node(node, model, fpgapart, clk):
                 prefix="code_gen_ipgen_" + str(node.name) + "_"
             )
             inst.set_nodeattr("code_gen_dir_ipgen", code_gen_dir)
-        # ensure that there is generated code inside the dir
-        inst.code_generation_ipgen(model, fpgapart, clk)
+            # ensure that there is generated code inside the dir
+            inst.code_generation_ipgen(model, fpgapart, clk)
+        else:
+            warnings.warn("Using pre-existing code for %s" % node.name)
     except KeyError:
         # exception if op_type is not supported
         raise Exception("Custom op_type %s is currently not supported." % op_type)
@@ -67,6 +69,9 @@ class PrepareIP(Transformation):
 
     * clk in ns (int)
 
+    Any nodes that already have a code_gen_dir_ipgen attribute pointing to a valid path
+    will be skipped.
+
     Outcome if successful: Node attribute "code_gen_dir_ipgen" contains path to folder
     that contains generated C++ code that can be used to generate a Vivado IP block.
     The subsequent transformation is HLSSynthIP"""
diff --git a/src/finn/transformation/fpgadataflow/prepare_rtlsim.py b/src/finn/transformation/fpgadataflow/prepare_rtlsim.py
index 5f0b89e85dc5f33319f64ef885db20ed9c4046af..8c28ab7e2376de392b0fdd628c70e854011dc406 100644
--- a/src/finn/transformation/fpgadataflow/prepare_rtlsim.py
+++ b/src/finn/transformation/fpgadataflow/prepare_rtlsim.py
@@ -28,7 +28,9 @@
 
 import finn.custom_op.registry as registry
 from finn.util.fpgadataflow import is_fpgadataflow_node
-
+from finn.transformation.fpgadataflow.replace_verilog_relpaths import (
+    ReplaceVerilogRelPaths,
+)
 from finn.transformation import NodeLocalTransformation
 
 try:
@@ -54,6 +56,10 @@ class PrepareRTLSim(NodeLocalTransformation):
     def __init__(self, num_workers=None):
         super().__init__(num_workers=num_workers)
 
+    def apply(self, model):
+        model = model.transform(ReplaceVerilogRelPaths())
+        return super().apply(model)
+
     def applyNodeLocal(self, node):
         op_type = node.op_type
         if is_fpgadataflow_node(node) is True:
diff --git a/src/finn/transformation/fpgadataflow/synth_ooc.py b/src/finn/transformation/fpgadataflow/synth_ooc.py
new file mode 100644
index 0000000000000000000000000000000000000000..8fd7e4724ef7f255b1435d5ab5e680d155d39487
--- /dev/null
+++ b/src/finn/transformation/fpgadataflow/synth_ooc.py
@@ -0,0 +1,64 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+from shutil import copy2
+
+from finn.transformation import Transformation
+from finn.util.vivado import out_of_context_synth
+from finn.util.basic import make_build_dir
+
+
+class SynthOutOfContext(Transformation):
+    """Run out-of-context Vivado synthesis on a stitched IP design."""
+
+    def __init__(self, part, clk_period_ns, clk_name="ap_clk"):
+        super().__init__()
+        self.part = part
+        self.clk_period_ns = clk_period_ns
+        self.clk_name = clk_name
+
+    def apply(self, model):
+        def file_to_basename(x):
+            return os.path.basename(os.path.realpath(x))
+
+        vivado_stitch_proj_dir = model.get_metadata_prop("vivado_stitch_proj")
+        assert vivado_stitch_proj_dir is not None, "Need stitched IP to run."
+        top_module_name = model.get_metadata_prop("wrapper_filename")
+        top_module_name = os.path.splitext(file_to_basename(top_module_name))[0]
+        build_dir = make_build_dir("synth_out_of_context_")
+        with open(vivado_stitch_proj_dir + "/all_verilog_srcs.txt", "r") as f:
+            all_verilog_srcs = f.read().split()
+        for file in all_verilog_srcs:
+            if file.endswith(".v"):
+                copy2(file, build_dir)
+        ret = out_of_context_synth(
+            build_dir, top_module_name, self.part, self.clk_name, self.clk_period_ns
+        )
+        model.set_metadata_prop("res_total_ooc_synth", str(ret))
+        return (model, False)
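
A hedged usage sketch for SynthOutOfContext (part number and model filename are placeholders):

    from finn.core.modelwrapper import ModelWrapper
    from finn.transformation.fpgadataflow.synth_ooc import SynthOutOfContext

    model = ModelWrapper("stitched_ip_model.onnx")   # hypothetical file
    model = model.transform(SynthOutOfContext(part="xc7z020clg400-1", clk_period_ns=10.0))
    print(model.get_metadata_prop("res_total_ooc_synth"))
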
diff --git a/src/finn/transformation/fpgadataflow/templates.py b/src/finn/transformation/fpgadataflow/templates.py
index 55ecb57decd2ac4fa08331b5ebbcb7fd2f0cd5c6..2b3789dc21cb4fe3f62d6b2a2ea0888329c9db66 100644
--- a/src/finn/transformation/fpgadataflow/templates.py
+++ b/src/finn/transformation/fpgadataflow/templates.py
@@ -91,7 +91,7 @@ cd %s
 
 pynq_driver_template = """
 import argparse
-
+import os
 from pynq import Overlay
 import numpy as np
 from pynq import allocate
@@ -101,11 +101,13 @@ from finn.util.data_packing import (
     packed_bytearray_to_finnpy
 )
 from finn.core.datatype import DataType
+from pynq.ps import Clocks
 
 class FINNAccelDriver():
-    def __init__(self, N, bitfile):
+    def __init__(self, N, bitfile, platform="$PLATFORM$"):
         \"\"\"Instantiate the FINN accelerator driver.
         Gets batchsize (N) as integer and path to bitfile as string.\"\"\"
+        self.platform = platform
         self.N = N
         # input FINN DataType
         self.idt = $INPUT_FINN_DATATYPE$
@@ -120,19 +122,29 @@ class FINNAccelDriver():
         self.oshape_packed = $OUTPUT_SHAPE_PACKED$  # datatype np.uint8
         # load bitfile and set up accelerator
         self.ol = Overlay(bitfile)
-        self.dma = self.ol.axi_dma_0
-        self.ctrl_regs = self.ol.resize_accel_0
         # neuron folding factor of output = iterations per sample
         self.itersPerSample = self.oshape_packed[-2]
-        # AXI lite register offset for number of iterations
-        # used by TLastMarker to signal end of transmission for AXI CDMA
-        self.REG_OFFSET_NUM_ITERS = 0x10
-        # set up TLastMarker with correct num. samples
-        self.ctrl_regs.write(self.REG_OFFSET_NUM_ITERS, self.N*self.itersPerSample)
+        # clock frequency as specified by user
+        self.fclk_mhz = $CLOCK_FREQ_MHZ$
+        if self.platform == "alveo":
+            self.idma = self.ol.idma0
+            self.odma = self.ol.odma0
+        elif self.platform == "zynq-iodma":
+            self.idma = self.ol.idma0
+            self.odma = self.ol.odma0
+            # set the clock frequency as specified by user during transformations
+            if self.fclk_mhz > 0:
+                Clocks.$CLK_NAME$ = self.fclk_mhz
+        else:
+            raise ValueError("Supported platforms are zynq-iodma and alveo")
 
         # allocate a PYNQ buffer for the packed input and buffer
-        self.ibuf_packed_device = allocate(shape=self.ishape_packed, dtype=np.uint8)
-        self.obuf_packed_device = allocate(shape=self.oshape_packed, dtype=np.uint8)
+        if self.platform == "alveo":
+            self.ibuf_packed_device = allocate(shape=self.ishape_packed, dtype=np.uint8)
+            self.obuf_packed_device = allocate(shape=self.oshape_packed, dtype=np.uint8)
+        else:
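+            # Zynq: use cacheable buffers; coherency with the PL DMA engines is
+            # handled by the explicit flush()/invalidate() calls in
+            # copy_input_data_to_device / copy_output_data_from_device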
+            self.ibuf_packed_device = allocate(shape=self.ishape_packed, dtype=np.uint8, cacheable=True)
+            self.obuf_packed_device = allocate(shape=self.oshape_packed, dtype=np.uint8, cacheable=True)
 
     def fold_input(self, ibuf_normal):
         \"\"\"Reshapes input in desired shape.
@@ -169,21 +181,40 @@ class FINNAccelDriver():
     def copy_input_data_to_device(self, data):
         \"\"\"Copies given input data to PYNQ buffer.\"\"\"
         np.copyto(self.ibuf_packed_device, data)
+        self.ibuf_packed_device.flush()
+
+    def copy_output_data_from_device(self, data):
+        \"\"\"Copies PYNQ output buffer from device.\"\"\"
+        self.obuf_packed_device.invalidate()
+        np.copyto(data, self.obuf_packed_device)
 
     def execute(self):
-        \"\"\"Executes accelerator by setting up the DMA and
-        waiting until all transfers complete. Uses only member variables and
+        \"\"\"Executes accelerator by setting up the DMA(s) and
+        waiting until all transfers/calls complete. Uses only member variables and
         returns nothing.\"\"\"
-        dma = self.dma
-        dma.sendchannel.transfer(self.ibuf_packed_device)
-        dma.recvchannel.transfer(self.obuf_packed_device)
-        dma.sendchannel.wait()
-        dma.recvchannel.wait()
+        if self.platform == "zynq-iodma":
+            # manually launch IODMAs since signatures are missing
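+            # register offsets used below: 0x00 = control (bit 0 starts the
+            # kernel, bit 1 signals completion), 0x10 = buffer device address,
+            # 0x1c = number of samples (N)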
+            self.idma.write(0x10, self.ibuf_packed_device.device_address)
+            self.idma.write(0x1c, self.N)
+            self.odma.write(0x10, self.obuf_packed_device.device_address)
+            self.odma.write(0x1c, self.N)
+            self.idma.write(0x00, 1)
+            self.odma.write(0x00, 1)
+            # wait until output IODMA is finished
+            status = self.odma.read(0x00)
+            while status & 0x2 == 0:
+                status = self.odma.read(0x00)
+        elif self.platform == "alveo":
+            idma_handle = self.idma.start_sw(self.ibuf_packed_device, self.N)
+            odma_handle = self.odma.start_sw(self.obuf_packed_device, self.N)
+            odma_handle.wait()
+
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description='Set exec mode, batchsize N, bitfile name, inputfile name and outputfile name')
     parser.add_argument('--exec_mode', help='Please select functional verification ("execute") or throughput test ("throughput_test")', default="execute")
+    parser.add_argument('--platform', help='Target platform: zynq-iodma or alveo', default="zynq-iodma")
     parser.add_argument('--batchsize', help='number of samples for inference', type=int, default=1)
     parser.add_argument('--bitfile', help='name of bitfile (i.e. "resizer.bit")', default="resizer.bit")
     parser.add_argument('--inputfile', help='name of input npy file (i.e. "input.npy")', default="input.npy")
@@ -191,17 +222,24 @@ if __name__ == "__main__":
     # parse arguments
     args = parser.parse_args()
     exec_mode = args.exec_mode
+    platform = args.platform
     N = args.batchsize
     bitfile = args.bitfile
     inputfile = args.inputfile
     outputfile = args.outputfile
 
     # instantiate FINN accelerator driver and pass batchsize and bitfile
-    finnDriver = FINNAccelDriver(N, bitfile)
+    finnDriver = FINNAccelDriver(N, bitfile, platform)
 
     # for the remote execution the data from the input npy file has to be loaded,
     # packed and copied to the PYNQ buffer
     if exec_mode == "execute":
+        # remove old output file to prevent reusing old output
+        # in case execution fails
+        try:
+            os.remove(outputfile)
+        except FileNotFoundError:
+            pass
         # load desired input .npy file
         ibuf_normal = np.load(inputfile)
         ibuf_folded = finnDriver.fold_input(ibuf_normal)
@@ -212,10 +250,15 @@ if __name__ == "__main__":
 
     # for the throughput test the runtime of the network has to be measured
     if exec_mode == "throughput_test":
-        # measure runtime of network
-        start = time.time()
+        # remove old metrics file
+        try:
+            os.remove("nw_metrics.txt")
+        except FileNotFoundError:
+            pass
         # dictionary for results of throughput test
         res={}
+        # measure runtime of network
+        start = time.time()
 
     # execute accelerator
     finnDriver.execute()
@@ -228,15 +271,220 @@ if __name__ == "__main__":
         res["throughput[images/s]"] = N / runtime
         res["DRAM_in_bandwidth[Mb/s]"] = np.prod(finnDriver.ishape_packed)*0.000001 / runtime
         res["DRAM_out_bandwidth[Mb/s]"] = np.prod(finnDriver.oshape_packed)*0.000001 / runtime
+        if platform != "alveo":
+            res["fclk[mhz]"] = Clocks.fclk0_mhz
+        else:
+            res["fclk[mhz]"] = finnDriver.fclk_mhz
+        res["N"] = N
         file = open("nw_metrics.txt", "w")
         file.write(str(res))
         file.close()
 
     # if execution is selected unpack, unfold and save output to output npy file
     else:
-        obuf_folded = finnDriver.unpack_output(finnDriver.obuf_packed_device)
+        obuf_packed = np.empty_like(finnDriver.obuf_packed_device)
+        finnDriver.copy_output_data_from_device(obuf_packed)
+        obuf_folded = finnDriver.unpack_output(obuf_packed)
         obuf_normal = finnDriver.unfold_output(obuf_folded)
         np.save(outputfile, obuf_normal)
 
 
 """
+
+custom_zynq_shell_template = """
+set FREQ_MHZ %s
+set NUM_AXILITE %d
+if {$NUM_AXILITE > 9} {
+    error "Maximum 10 AXI-Lite interfaces supported"
+}
+set NUM_AXIMM %d
+set BOARD %s
+set FPGA_PART %s
+create_project finn_zynq_link ./ -part $FPGA_PART
+
+# set board part repo paths to find PYNQ-Z1/Z2
+set paths_prop [get_property BOARD_PART_REPO_PATHS [current_project]]
+set paths_param [get_param board.repoPaths]
+lappend paths_prop /workspace/finn/board_files
+lappend paths_param /workspace/finn/board_files
+set_property BOARD_PART_REPO_PATHS $paths_prop [current_project]
+set_param board.repoPaths $paths_param
+
+if {$BOARD == "ZCU104"} {
+    set_property board_part xilinx.com:zcu104:part0:1.1 [current_project]
+    set ZYNQ_TYPE "zynq_us+"
+} elseif {$BOARD == "Ultra96"} {
+    set_property board_part em.avnet.com:ultra96v1:part0:1.2 [current_project]
+    set ZYNQ_TYPE "zynq_us+"
+} elseif {$BOARD == "Pynq-Z2"} {
+    set ZYNQ_TYPE "zynq_7000"
+} elseif {$BOARD == "Pynq-Z1"} {
+    set ZYNQ_TYPE "zynq_7000"
+    set_property board_part www.digilentinc.com:pynq-z1:part0:1.0 [current_project]
+} else {
+    puts "Unrecognized board"
+}
+
+create_bd_design "top"
+if {$ZYNQ_TYPE == "zynq_us+"} {
+    create_bd_cell -type ip -vlnv xilinx.com:ip:zynq_ultra_ps_e:3.3 zynq_ps
+    apply_bd_automation -rule xilinx.com:bd_rule:zynq_ultra_ps_e -config {apply_board_preset "1" }  [get_bd_cells zynq_ps]
+    #activate one slave port, deactivate the second master port
+    set_property -dict [list CONFIG.PSU__USE__S_AXI_GP2 {1}] [get_bd_cells zynq_ps]
+    set_property -dict [list CONFIG.PSU__USE__M_AXI_GP1 {0}] [get_bd_cells zynq_ps]
+    #set frequency of PS clock (this can't always be exactly met)
+    set_property -dict [list CONFIG.PSU__CRL_APB__PL0_REF_CTRL__FREQMHZ [expr int($FREQ_MHZ)]] [get_bd_cells zynq_ps]
+} elseif {$ZYNQ_TYPE == "zynq_7000"} {
+    create_bd_cell -type ip -vlnv xilinx.com:ip:processing_system7:5.5 zynq_ps
+    apply_bd_automation -rule xilinx.com:bd_rule:processing_system7 -config {make_external "FIXED_IO, DDR" apply_board_preset "1" Master "Disable" Slave "Disable" }  [get_bd_cells zynq_ps]
+    set_property -dict [list CONFIG.PCW_USE_S_AXI_HP0 {1}] [get_bd_cells zynq_ps]
+    set_property -dict [list CONFIG.PCW_FPGA0_PERIPHERAL_FREQMHZ [expr int($FREQ_MHZ)]] [get_bd_cells zynq_ps]
+} else {
+    puts "Unrecognized Zynq type"
+}
+
+#instantiate axi interconnect, axi smartconnect
+create_bd_cell -type ip -vlnv xilinx.com:ip:axi_interconnect:2.1 axi_interconnect_0
+create_bd_cell -type ip -vlnv xilinx.com:ip:smartconnect:1.0 smartconnect_0
+#set number of axilite interfaces, and number of axi master interfaces
+set_property -dict [list CONFIG.NUM_SI $NUM_AXILITE] [get_bd_cells smartconnect_0]
+set_property -dict [list CONFIG.NUM_MI $NUM_AXIMM] [get_bd_cells axi_interconnect_0]
+
+#create reset controller and connect interconnects to PS
+if {$ZYNQ_TYPE == "zynq_us+"} {
+    connect_bd_intf_net [get_bd_intf_pins smartconnect_0/M00_AXI] [get_bd_intf_pins zynq_ps/S_AXI_HP0_FPD]
+    connect_bd_intf_net [get_bd_intf_pins zynq_ps/M_AXI_HPM0_FPD] -boundary_type upper [get_bd_intf_pins axi_interconnect_0/S00_AXI]
+    #connect interconnect clocks and resets
+    apply_bd_automation -rule xilinx.com:bd_rule:clkrst -config { Clk {/zynq_ps/pl_clk0} Freq {} Ref_Clk0 {} Ref_Clk1 {} Ref_Clk2 {}}  [get_bd_pins axi_interconnect_0/ACLK]
+    apply_bd_automation -rule xilinx.com:bd_rule:clkrst -config { Clk {/zynq_ps/pl_clk0} Freq {} Ref_Clk0 {} Ref_Clk1 {} Ref_Clk2 {}}  [get_bd_pins axi_interconnect_0/S00_ACLK]
+    apply_bd_automation -rule xilinx.com:bd_rule:clkrst -config { Clk {/zynq_ps/pl_clk0} Freq {} Ref_Clk0 {} Ref_Clk1 {} Ref_Clk2 {}}  [get_bd_pins zynq_ps/saxihp0_fpd_aclk]
+} elseif {$ZYNQ_TYPE == "zynq_7000"} {
+    connect_bd_intf_net -boundary_type upper [get_bd_intf_pins zynq_ps/M_AXI_GP0] [get_bd_intf_pins axi_interconnect_0/S00_AXI]
+    connect_bd_intf_net [get_bd_intf_pins smartconnect_0/M00_AXI] [get_bd_intf_pins zynq_ps/S_AXI_HP0]
+    apply_bd_automation -rule xilinx.com:bd_rule:clkrst -config { Clk {/zynq_ps/FCLK_CLK0} Freq {} Ref_Clk0 {} Ref_Clk1 {} Ref_Clk2 {}}  [get_bd_pins axi_interconnect_0/ACLK]
+    apply_bd_automation -rule xilinx.com:bd_rule:clkrst -config { Clk {/zynq_ps/FCLK_CLK0} Freq {} Ref_Clk0 {} Ref_Clk1 {} Ref_Clk2 {}}  [get_bd_pins axi_interconnect_0/S00_ACLK]
+    apply_bd_automation -rule xilinx.com:bd_rule:clkrst -config { Clk {/zynq_ps/FCLK_CLK0} Freq {} Ref_Clk0 {} Ref_Clk1 {} Ref_Clk2 {}}  [get_bd_pins zynq_ps/S_AXI_HP0_ACLK]
+}
+connect_bd_net [get_bd_pins axi_interconnect_0/ARESETN] [get_bd_pins smartconnect_0/aresetn]
+
+#custom IP instantiations/connections start here
+%s
+
+# set up debug
+if {%d == 1} {
+    set_property HDL_ATTRIBUTE.DEBUG true [get_bd_intf_nets {idma0_m_axis_0}]
+    set_property HDL_ATTRIBUTE.DEBUG true [get_bd_intf_nets {StreamingDataflowPartition_1_m_axis_0}]
+    set_property HDL_ATTRIBUTE.DEBUG true [get_bd_intf_nets {smartconnect_0_M00_AXI}]
+    apply_bd_automation -rule xilinx.com:bd_rule:debug -dict [list \
+                                                              [get_bd_intf_nets smartconnect_0_M00_AXI] {AXI_R_ADDRESS "Data and Trigger" AXI_R_DATA "Data and Trigger" AXI_W_ADDRESS "Data and Trigger" AXI_W_DATA "Data and Trigger" AXI_W_RESPONSE "Data and Trigger" CLK_SRC "/zynq_ps/FCLK_CLK0" SYSTEM_ILA "Auto" APC_EN "0" } \
+                                                              [get_bd_intf_nets idma0_m_axis_0] {AXIS_SIGNALS "Data and Trigger" CLK_SRC "/zynq_ps/FCLK_CLK0" SYSTEM_ILA "Auto" APC_EN "0" } \
+                                                              [get_bd_intf_nets StreamingDataflowPartition_1_m_axis_0] {AXIS_SIGNALS "Data and Trigger" CLK_SRC "/zynq_ps/FCLK_CLK0" SYSTEM_ILA "Auto" APC_EN "0" } \
+                                                             ]
+}
+
+#finalize clock and reset connections for interconnects
+if {$ZYNQ_TYPE == "zynq_us+"} {
+    apply_bd_automation -rule xilinx.com:bd_rule:clkrst -config { Clk {/zynq_ps/pl_clk0} }  [get_bd_pins axi_interconnect_0/M*_ACLK]
+} elseif {$ZYNQ_TYPE == "zynq_7000"} {
+    apply_bd_automation -rule xilinx.com:bd_rule:clkrst -config { Clk {/zynq_ps/FCLK_CLK0} }  [get_bd_pins axi_interconnect_0/M*_ACLK]
+}
+
+save_bd_design
+assign_bd_address
+validate_bd_design
+
+set_property SYNTH_CHECKPOINT_MODE "Hierarchical" [ get_files top.bd ]
+make_wrapper -files [get_files top.bd] -import -fileset sources_1 -top
+
+set_property strategy Flow_PerfOptimized_high [get_runs synth_1]
+set_property STEPS.SYNTH_DESIGN.ARGS.DIRECTIVE AlternateRoutability [get_runs synth_1]
+set_property STEPS.SYNTH_DESIGN.ARGS.RETIMING true [get_runs synth_1]
+set_property strategy Performance_ExtraTimingOpt [get_runs impl_1]
+set_property STEPS.OPT_DESIGN.ARGS.DIRECTIVE Explore [get_runs impl_1]
+set_property STEPS.POST_ROUTE_PHYS_OPT_DESIGN.ARGS.DIRECTIVE AggressiveExplore [get_runs impl_1]
+set_property STEPS.PHYS_OPT_DESIGN.ARGS.DIRECTIVE AggressiveExplore [get_runs impl_1]
+set_property STEPS.POST_ROUTE_PHYS_OPT_DESIGN.IS_ENABLED true [get_runs impl_1]
+
+# out-of-context synth can't be used for bitstream generation
+# set_property -name {STEPS.SYNTH_DESIGN.ARGS.MORE OPTIONS} -value {-mode out_of_context} -objects [get_runs synth_1]
+launch_runs -to_step write_bitstream impl_1 -jobs %d
+wait_on_run [get_runs impl_1]
+
+# generate synthesis report
+open_run synth_1 -name synth_1
+report_utilization -hierarchical -hierarchical_depth 4 -file synth_report.xml -format xml
+"""
+
+alveo_run_sh_template = """#!/bin/bash
+
+if [ "$#" -ne 2 ]; then
+    echo "Usage: alveo_run.sh <exec_mode={execute, throughput_test}> <batch_size>"
+    exit -1
+fi
+
+cd $REMOTE_DEPLOY_DIR$
+eval "$(conda shell.bash hook)"
+conda activate $CONDA_ENV_NAME$
+source $REMOTE_XRT$/setup.sh
+export PLATFORM_REPO_PATHS=$REMOTE_PLATFORM_REPO_PATHS$
+python3.6 driver.py --exec_mode=$1 --batchsize=$2 --bitfile=$BITFILE$ \
+    --inputfile=input.npy --outputfile=output.npy --platform=alveo
+"""
+
+vitis_gen_xml_report_tcl_template = """
+open_project $VITIS_PROJ_PATH$/_x/link/vivado/vpl/prj/prj.xpr
+open_run impl_1
+report_utilization -hierarchical -hierarchical_depth 5 -file $VITIS_PROJ_PATH$/synth_report.xml -format xml
+"""
+
+pynq_validation_template = """
+import argparse
+from driver import FINNAccelDriver
+import numpy as np
+
+if __name__ == "__main__":
+  parser = argparse.ArgumentParser(description='Validate top-1 accuracy for FINN accelerator')
+  parser.add_argument('--batchsize', help='number of samples for inference', type=int, default=100)
+  parser.add_argument('--dataset', help='dataset to use (mnist or cifar10)', required=True)
+  # parse arguments
+  args = parser.parse_args()
+  bsize = args.batchsize
+  dataset = args.dataset
+
+  if dataset == "mnist":
+    from dataset_loading import mnist
+    trainx, trainy, testx, testy, valx, valy = mnist.load_mnist_data("/tmp", download=True, one_hot=False)
+  elif dataset == "cifar10":
+    from dataset_loading import cifar
+    trainx, trainy, testx, testy, valx, valy = cifar.load_cifar_data("/tmp", download=True, one_hot=False)
+  else:
+    raise Exception("Unrecognized dataset")
+
+  test_imgs = testx
+  test_labels = testy
+
+  ok = 0
+  nok = 0
+  total = test_imgs.shape[0]
+  driver = FINNAccelDriver(bsize, "resizer.bit", "zynq-iodma")
+
+  n_batches = int(total / bsize)
+
+  test_imgs = test_imgs.reshape(n_batches, bsize, -1)
+  test_labels = test_labels.reshape(n_batches, bsize)
+
+  for i in range(n_batches):
+    ibuf_normal = test_imgs[i].reshape(driver.ibuf_packed_device.shape)
+    exp = test_labels[i]
+    driver.copy_input_data_to_device(ibuf_normal)
+    driver.execute()
+    obuf_normal = np.empty_like(driver.obuf_packed_device)
+    driver.copy_output_data_from_device(obuf_normal)
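+    # bincount over the boolean match vector: index 0 counts mismatches,
+    # index 1 counts matches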
+    ret = np.bincount(obuf_normal.flatten() == exp.flatten(), minlength=2)
+    nok += ret[0]
+    ok += ret[1]
+    print("batch %d / %d : total OK %d NOK %d" % (i, n_batches, ok, nok))
+
+  acc = 100.0 * ok / total
+  print("Final accuracy: %f" % acc)
+"""
diff --git a/src/finn/transformation/fpgadataflow/vitis_build.py b/src/finn/transformation/fpgadataflow/vitis_build.py
new file mode 100644
index 0000000000000000000000000000000000000000..482dc8d784c66faef9392093c7c857630304eef3
--- /dev/null
+++ b/src/finn/transformation/fpgadataflow/vitis_build.py
@@ -0,0 +1,368 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import subprocess
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation import Transformation
+from finn.custom_op.registry import getCustomOp
+
+from finn.transformation.fpgadataflow.create_dataflow_partition import (
+    CreateDataflowPartition,
+)
+from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
+from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO
+from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker
+from finn.transformation.fpgadataflow.insert_iodma import InsertIODMA
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
+from finn.transformation.fpgadataflow.floorplan import Floorplan
+from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
+from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
+from finn.util.basic import make_build_dir
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from . import templates
+from enum import Enum
+
+
+def _check_vitis_envvars():
+    assert "VITIS_PATH" in os.environ, "VITIS_PATH must be set for Vitis"
+    assert (
+        "PLATFORM_REPO_PATHS" in os.environ
+    ), "PLATFORM_REPO_PATHS must be set for Vitis"
+    assert (
+        "XILINX_XRT" in os.environ
+    ), "XILINX_XRT must be set for Vitis, ensure the XRT env is sourced"
+
+
+class VitisOptStrategy(Enum):
+    "Values applicable to VitisBuild optimization strategy."
+
+    DEFAULT = "0"
+    POWER = "1"
+    PERFORMANCE = "2"
+    PERFORMANCE_BEST = "3"
+    SIZE = "s"
+    BUILD_SPEED = "quick"
+
+
+class CreateVitisXO(Transformation):
+    """Create a Vitis object file from a stitched FINN ip.
+
+    Outcome if successful: sets the vitis_xo attribute in the ONNX
+    ModelProto's metadata_props field with the name of the object file as value.
+    The object file can be found under the ip subdirectory.
+    """
+
+    def __init__(self, ip_name="finn_design"):
+        super().__init__()
+        self.ip_name = ip_name
+
+    def apply(self, model):
+        _check_vitis_envvars()
+        vivado_proj_dir = model.get_metadata_prop("vivado_stitch_proj")
+        stitched_ip_dir = vivado_proj_dir + "/ip"
+        args_string = []
+        m_axis_idx = 0
+        s_axis_idx = 0
+        # NOTE: this assumes the graph is Vitis-compatible: at most one AXI-Lite interface
+        # developed from instructions in UG1393 (v2019.2) and package_xo documentation
+        # package_xo is responsible for generating the kernel xml
+        for node in model.graph.node:
+            node_inst = getCustomOp(node)
+            arg_id = 0
+            if node.op_type == "TLastMarker":
+                stream_width = node_inst.get_nodeattr("StreamWidth")
+                # add a stream input or output port, based on direction
+                if node_inst.get_nodeattr("Direction") == "in":
+                    args_string.append(
+                        "{in:4:%s:s_axis_%d:0x0:0x0:ap_uint&lt;%s>:0}"
+                        % (str(arg_id), s_axis_idx, str(stream_width))
+                    )
+                    s_axis_idx += 1
+                else:
+                    args_string.append(
+                        "{out:4:%s:m_axis_%d:0x0:0x0:ap_uint&lt;%s>:0}"
+                        % (str(arg_id), m_axis_idx, str(stream_width))
+                    )
+                    m_axis_idx += 1
+                arg_id += 1
+                # add an AXI-Lite port if dynamic
+                # add a count parameter if dynamic
+                if node_inst.get_nodeattr("DynIters") == 1:
+                    args_string.append(
+                        "{numReps:0:%s:s_axi_control:0x4:0x10:uint:0}" % str(arg_id)
+                    )
+                    arg_id += 1
+            elif node.op_type == "IODMA":
+                port_width = node_inst.get_nodeattr("intfWidth")
+                # add an address parameter
+                # add a count parameter
+                args_string.append(
+                    "{addr:1:%s:m_axi_gmem0:0x8:0x10:ap_uint&lt;%s>*:0}"
+                    % (str(arg_id), str(port_width))
+                )
+                arg_id += 1
+                args_string.append(
+                    "{numReps:0:%s:s_axi_control:0x4:0x1C:uint:0}" % str(arg_id)
+                )
+                arg_id += 1
+
+        # save kernel xml then run package_xo
+        xo_name = self.ip_name + ".xo"
+        xo_path = vivado_proj_dir + "/" + xo_name
+        model.set_metadata_prop("vitis_xo", xo_path)
+
+        # generate the package_xo command in a tcl script
+        package_xo_string = (
+            "package_xo -force -xo_path %s -kernel_name %s -ip_directory %s"
+            % (xo_path, self.ip_name, stitched_ip_dir)
+        )
+        for arg in args_string:
+            package_xo_string += " -kernel_xml_args " + arg
+        with open(vivado_proj_dir + "/gen_xo.tcl", "w") as f:
+            f.write(package_xo_string)
+
+        # create a shell script and call Vivado
+        package_xo_sh = vivado_proj_dir + "/gen_xo.sh"
+        working_dir = os.environ["PWD"]
+        with open(package_xo_sh, "w") as f:
+            f.write("#!/bin/bash \n")
+            f.write("cd {}\n".format(vivado_proj_dir))
+            f.write("vivado -mode batch -source gen_xo.tcl\n")
+            f.write("cd {}\n".format(working_dir))
+        bash_command = ["bash", package_xo_sh]
+        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
+        process_compile.communicate()
+        assert os.path.isfile(xo_path), (
+            "Vitis .xo file not created, check logs under %s" % vivado_proj_dir
+        )
+        return (model, False)
+
+
+class VitisLink(Transformation):
+    """Create an XCLBIN with Vitis.
+
+    Outcome if successful: sets the bitfile attribute in the ONNX
+    ModelProto's metadata_props field with the XCLBIN full path as value.
+    """
+
+    def __init__(self, platform, f_mhz=200, strategy=VitisOptStrategy.PERFORMANCE):
+        super().__init__()
+        self.platform = platform
+        self.f_mhz = f_mhz
+        self.strategy = strategy
+
+    def apply(self, model):
+        _check_vitis_envvars()
+        # create a config file and empty list of xo files
+        config = ["[connectivity]"]
+        object_files = []
+        idma_idx = 0
+        odma_idx = 0
+        instance_names = {}
+        for node in model.graph.node:
+            assert node.op_type == "StreamingDataflowPartition", "Invalid link graph"
+            sdp_node = getCustomOp(node)
+            dataflow_model_filename = sdp_node.get_nodeattr("model")
+            kernel_model = ModelWrapper(dataflow_model_filename)
+            kernel_xo = kernel_model.get_metadata_prop("vitis_xo")
+            object_files.append(kernel_xo)
+            # gather info on connectivity
+            # assume each node connected to outputs/inputs is DMA:
+            # has axis, aximm and axilite
+            # everything else is axis-only
+            # assume only one connection from each ip to the next
+            # all aximm allocated to DDR[0]
+            # all kernels allocated to SLR0
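+            # the v++ [connectivity] directives emitted below: nk= instantiates
+            # kernels, slr= pins an instance to an SLR, sp= maps an AXI-MM port
+            # to a memory bank, stream_connect= wires up AXI streams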
+            producer = model.find_producer(node.input[0])
+            consumer = model.find_consumers(node.output[0])
+            # define kernel instances
+            # name kernels connected to graph inputs as idmaxx
+            # name kernels connected to graph outputs as odmaxx
+            if producer is None:
+                instance_names[node.name] = "idma" + str(idma_idx)
+                config.append("nk=%s:1:%s" % (node.name, instance_names[node.name]))
+                idma_idx += 1
+            elif consumer is None:
+                instance_names[node.name] = "odma" + str(odma_idx)
+                config.append("nk=%s:1:%s" % (node.name, instance_names[node.name]))
+                odma_idx += 1
+            else:
+                instance_names[node.name] = node.name
+                config.append("nk=%s:1:%s" % (node.name, instance_names[node.name]))
+            # assign SLRs
+            config.append("slr=%s:SLR0" % instance_names[node.name])
+            # assign memory banks
+            if producer is None or consumer is None:
+                config.append(
+                    "sp=%s.m_axi_gmem0:DDR[%d]" % (instance_names[node.name], 0)
+                )
+            # connect streams
+            if producer is not None:
+                for i in range(len(node.input)):
+                    producer = model.find_producer(node.input[i])
+                    if producer is not None:
+                        j = list(producer.output).index(node.input[i])
+                        config.append(
+                            "stream_connect=%s.m_axis_%d:%s.s_axis_%d"
+                            % (
+                                instance_names[producer.name],
+                                j,
+                                instance_names[node.name],
+                                i,
+                            )
+                        )
+
+        # create a temporary folder for the project
+        link_dir = make_build_dir(prefix="vitis_link_proj_")
+        model.set_metadata_prop("vitis_link_proj", link_dir)
+
+        config = "\n".join(config) + "\n"
+        with open(link_dir + "/config.txt", "w") as f:
+            f.write(config)
+
+        # create tcl script to generate resource report in XML format
+        gen_rep_xml = templates.vitis_gen_xml_report_tcl_template
+        gen_rep_xml = gen_rep_xml.replace("$VITIS_PROJ_PATH$", link_dir)
+        with open(link_dir + "/gen_report_xml.tcl", "w") as f:
+            f.write(gen_rep_xml)
+
+        # create a shell script and call Vitis
+        script = link_dir + "/run_vitis_link.sh"
+        working_dir = os.environ["PWD"]
+        with open(script, "w") as f:
+            f.write("#!/bin/bash \n")
+            f.write("cd {}\n".format(link_dir))
+            f.write(
+                "v++ -t hw --platform %s --link %s"
+                " --kernel_frequency %d --config config.txt --optimize %s"
+                " --save-temps -R2\n"
+                % (
+                    self.platform,
+                    " ".join(object_files),
+                    self.f_mhz,
+                    self.strategy.value,
+                )
+            )
+            f.write("cd {}\n".format(working_dir))
+        bash_command = ["bash", script]
+        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
+        process_compile.communicate()
+        # TODO rename xclbin appropriately here?
+        xclbin = link_dir + "/a.xclbin"
+        assert os.path.isfile(xclbin), (
+            "Vitis .xclbin file not created, check logs under %s" % link_dir
+        )
+        model.set_metadata_prop("bitfile", xclbin)
+
+        # run Vivado to gen xml report
+        gen_rep_xml_sh = link_dir + "/gen_report_xml.sh"
+        working_dir = os.environ["PWD"]
+        with open(gen_rep_xml_sh, "w") as f:
+            f.write("#!/bin/bash \n")
+            f.write("cd {}\n".format(link_dir))
+            f.write(
+                "vivado -mode tcl -source %s\n" % (link_dir + "/gen_report_xml.tcl")
+            )
+            f.write("cd {}\n".format(working_dir))
+        bash_command = ["bash", gen_rep_xml_sh]
+        process_genxml = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
+        process_genxml.communicate()
+        # filename for the synth utilization report
+        synth_report_filename = link_dir + "/synth_report.xml"
+        model.set_metadata_prop("vivado_synth_rpt", synth_report_filename)
+        return (model, False)
+
+
+class VitisBuild(Transformation):
+    """Best-effort attempt at building the accelerator with Vitis."""
+
+    def __init__(
+        self, fpga_part, period_ns, platform, strategy=VitisOptStrategy.PERFORMANCE
+    ):
+        super().__init__()
+        self.fpga_part = fpga_part
+        self.period_ns = period_ns
+        self.platform = platform
+        self.strategy = strategy
+
+    def apply(self, model):
+        _check_vitis_envvars()
+        # first infer layouts
+        model = model.transform(InferDataLayouts())
+        # prepare at global level, then break up into kernels
+        prep_transforms = [
+            MakePYNQDriver(platform="alveo"),
+            InsertIODMA(512),
+            InsertDWC(),
+            Floorplan(),
+            CreateDataflowPartition(),
+        ]
+        for trn in prep_transforms:
+            model = model.transform(trn)
+            model = model.transform(GiveUniqueNodeNames())
+            model = model.transform(GiveReadableTensorNames())
+        # Build each kernel individually
+        sdp_nodes = model.get_nodes_by_op_type("StreamingDataflowPartition")
+        for sdp_node in sdp_nodes:
+            sdp_node = getCustomOp(sdp_node)
+            dataflow_model_filename = sdp_node.get_nodeattr("model")
+            kernel_model = ModelWrapper(dataflow_model_filename)
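+            # give each kernel its own FIFOs and static TLastMarkers on the
+            # in/out streams, so that CreateVitisXO can expose them as
+            # AXI-stream kernel ports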
+            kernel_model = kernel_model.transform(InsertFIFO())
+            kernel_model = kernel_model.transform(
+                InsertTLastMarker(both=True, external=False, dynamic=False)
+            )
+            kernel_model = kernel_model.transform(GiveUniqueNodeNames())
+            kernel_model.save(dataflow_model_filename)
+            kernel_model = kernel_model.transform(
+                PrepareIP(self.fpga_part, self.period_ns)
+            )
+            kernel_model = kernel_model.transform(HLSSynthIP())
+            kernel_model = kernel_model.transform(
+                CreateStitchedIP(
+                    self.fpga_part, self.period_ns, sdp_node.onnx_node.name, True
+                )
+            )
+            kernel_model = kernel_model.transform(
+                CreateVitisXO(sdp_node.onnx_node.name)
+            )
+            kernel_model.set_metadata_prop("platform", "alveo")
+            kernel_model.save(dataflow_model_filename)
+        # Assemble design from kernels
+        model = model.transform(
+            VitisLink(
+                self.platform, round(1000 / self.period_ns), strategy=self.strategy
+            )
+        )
+        # set platform attribute for correct remote execution
+        model.set_metadata_prop("platform", "alveo")
+
+        return (model, False)
diff --git a/src/finn/transformation/general.py b/src/finn/transformation/general.py
index 53c73e1dc4fe0bfab53e3f126add992cb338c11d..02f95b14e7944b828ae4c71ebf26851dc90b755d 100644
--- a/src/finn/transformation/general.py
+++ b/src/finn/transformation/general.py
@@ -28,17 +28,72 @@
 
 import finn.util.basic as util
 from finn.transformation import Transformation
+from toposort import toposort_flatten
+
+
+class RemoveUnusedTensors(Transformation):
+    """Remove any unused tensors in the graph by removing any initializers,
+    ValueInfo and tensor annotations associated with it. Unused tensors do not
+    appear as any input/output for any graph nodes.
+    """
+
+    def apply(self, model):
+        graph_modified = False
+        onnx_graph = model.model.graph
+        # build a set of tensors that we actually use in the graph nodes
+        used_tensors = set()
+        for node in model.graph.node:
+            for i in node.input:
+                used_tensors.add(i)
+            for o in node.output:
+                used_tensors.add(o)
+        # remove initializers, value_info and annotations that are not in the
+        # used set of tensors, as determined by the graph node i/o
+        for init in onnx_graph.initializer:
+            if init.name not in used_tensors:
+                onnx_graph.initializer.remove(init)
+                graph_modified = True
+        for vi in onnx_graph.value_info:
+            if vi.name not in used_tensors:
+                onnx_graph.value_info.remove(vi)
+                graph_modified = True
+        for qa in onnx_graph.quantization_annotation:
+            if qa.tensor_name not in used_tensors:
+                onnx_graph.quantization_annotation.remove(qa)
+                graph_modified = True
+
+        return (model, graph_modified)
+
+
+class RemoveStaticGraphInputs(Transformation):
+    "Remove any top-level graph inputs that have initializers."
+
+    def apply(self, model):
+        graph_modified = False
+        for i in model.graph.input:
+            if model.get_initializer(i.name) is not None:
+                # move ValueInfo to internal (value_info) container
+                model.graph.value_info.append(i)
+                model.graph.input.remove(i)
+                graph_modified = True
+
+        return (model, graph_modified)
 
 
 class GiveUniqueNodeNames(Transformation):
-    """Give unique names to each node in the graph using enumeration."""
+    """Give unique names to each node in the graph using enumeration, starting
+    with the given prefix (if specified in the constructor)."""
+
+    def __init__(self, prefix=""):
+        super().__init__()
+        self.prefix = prefix
 
     def apply(self, model):
         optype_count = {}
         for n in model.graph.node:
             if n.op_type not in optype_count.keys():
                 optype_count[n.op_type] = 0
-            n.name = "%s_%d" % (n.op_type, optype_count[n.op_type])
+            n.name = "%s%s_%d" % (self.prefix, n.op_type, optype_count[n.op_type])
             optype_count[n.op_type] += 1
         # return model_was_changed = False as single iteration is always enough
         return (model, False)
@@ -56,8 +111,9 @@ class GiveRandomTensorNames(Transformation):
 
 
 class GiveReadableTensorNames(Transformation):
-    """Give more human-readable names to all internal tensors. It's recommended
-    to apply give_unique_node_names prior to this transform."""
+    """Give more human-readable names to all internal tensors. You should
+    apply GiveUniqueNodeNames prior to this transform to avoid empty node names,
+    as the readable names are based on the node names."""
 
     def apply(self, model):
         # to ensure we can use rename_tensor safely (without renaming existing
@@ -65,6 +121,7 @@ class GiveReadableTensorNames(Transformation):
         model = model.transform(GiveRandomTensorNames())
         graph = model.graph
         for n in graph.node:
+            assert n.name != "", "Found empty node name"
             out_num = 0
             for o in n.output:
                 model.rename_tensor(o, "%s_out%d" % (n.name, out_num))
@@ -81,6 +138,95 @@ class GiveReadableTensorNames(Transformation):
         return (model, False)
 
 
+class GiveUniqueParameterTensors(Transformation):
+    """Make every parameter tensor unique. The aim is to avoid affecting
+    other nodes apart from the one the system is currently operating on."""
+
+    def apply(self, model):
+        graph = model.graph
+        graph_modified = False
+        seen_parameters = []
+        for n in graph.node:
+            # copy inputs since they may be modified
+            node_inputs_list = [x for x in n.input]
+            for input_idx, node_input in enumerate(node_inputs_list):
+                # check if it's a parameter
+                input_init = model.get_initializer(node_input)
+                if input_init is None:
+                    # dynamic input
+                    continue
+
+                # check if repeated
+                if node_input not in seen_parameters:
+                    # first occurrence
+                    seen_parameters += [node_input]
+                    continue
+
+                new_param_name = model.make_new_valueinfo_name()
+
+                model.set_initializer(new_param_name, input_init)
+                model.set_tensor_datatype(
+                    new_param_name, model.get_tensor_datatype(node_input)
+                )
+
+                # point node input to new tensor
+                n.input[input_idx] = new_param_name
+                graph_modified = True
+
+        return (model, graph_modified)
+
+
+class SortGraph(Transformation):
+    """ Returns the model with its node list sorted topologically.
+    Any ONNX graph to be executed must have a topologically sorted node list,
+    as dictated by the ONNX standard.
+    """
+
+    # Notes on SortGraph performance:
+    # benchmark in tests/transformation/test_sort_graph.py
+    # The algorithm does not move initializers, so its runtime should depend
+    # only on the number of nodes.
+    #
+    # Relative order of magnitude of time per step:
+    # - Gather graph structure:       base
+    # - Sort nodes:                   0.1 of base
+    # - Remove and insert in order:   0.001 of base
+    #
+    # Removing the nodes and re-inserting them in sorted order is likely faster
+    # than copying initializers around, and more robust in general.
+
+    def apply(self, model):
+        if len(model.graph.node) == 1:
+            # single-node graph, nothing to sort
+            return (model, False)
+        # Gather graph structure
+        graph_dependencies = {}
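+        # map each node's index to the set of indices of its direct predecessors;
+        # toposort_flatten then yields the node indices in a valid execution order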
+        # keep a copy of the node list, since the nodes are removed from the
+        # graph and re-inserted in sorted order below
+        node_list = [n for n in model.graph.node]
+        for node_idx, n in enumerate(node_list):
+            node_pred = model.find_direct_predecessors(n)
+            if node_pred is None:
+                # nodes without predecessors (graph inputs) carry no dependencies;
+                # this also drops any dangling nodes that are not connected at all
+                continue
+
+            node_dependencies = [node_list.index(pred) for pred in node_pred]
+            graph_dependencies[node_idx] = set(node_dependencies)
+
+        # Sort nodes
+        sorted_node_indexes = toposort_flatten(graph_dependencies)
+
+        # Remove nodes and insert them in order
+        # the nodes could not be removed earlier, since
+        # model.find_direct_predecessors() needs them to still be in the graph
+        for n in node_list:
+            model.graph.node.remove(n)
+
+        for new_idx, sorted_idx in enumerate(sorted_node_indexes):
+            model.graph.node.insert(new_idx, node_list[sorted_idx])
+
+        return (model, False)
+
+
 class ConvertSubToAdd(Transformation):
     """Convert subtract-a-constant nodes to add-a-constant nodes."""
 
diff --git a/src/finn/transformation/infer_data_layouts.py b/src/finn/transformation/infer_data_layouts.py
new file mode 100644
index 0000000000000000000000000000000000000000..d07162fa049bd016e91b8c5b01ea56eda6267655
--- /dev/null
+++ b/src/finn/transformation/infer_data_layouts.py
@@ -0,0 +1,127 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import finn.custom_op.registry as registry
+import finn.core.data_layout as DataLayout
+from finn.transformation import Transformation
+import warnings
+from finn.util.basic import get_by_name
+
+
+def _dims_to_layout(model, node, ndims):
+    if ndims == 2:
+        return DataLayout.NC
+    else:
+        if node.domain == "finn":
+            if node.op_type == "MultiThreshold" or node.op_type == "QuantAvgPool2d":
+                mt_inst = registry.getCustomOp(node)
+                layout = mt_inst.get_nodeattr("data_layout")
+                if layout == "NHWC" and ndims == 4:
+                    return DataLayout.NHWC
+                elif layout == "NCHW" and ndims == 4:
+                    return DataLayout.NCHW
+                else:
+                    return DataLayout.UNKNOWN
+            else:
+                if ndims == 4:
+                    return DataLayout.NHWC
+                else:
+                    return DataLayout.UNKNOWN
+        else:
+            # propagate input layout to output
+            # TODO this won't work for concat, squeeze/unsqueeze/reshape...
+            return model.get_tensor_layout(node.input[0])
+
+
+def _infer_node_data_layout(model, node):
+    """Infer output data layout annotation(s) for a particular node.
+    Returns True if any changes were made."""
+    old_layouts = list(map(lambda x: model.get_tensor_layout(x), node.output))
+    if node.domain == "finn":
+        # try to guess based on number of output dims
+        for o in node.output:
+            ndims = len(model.get_tensor_shape(o))
+            new_layout = _dims_to_layout(model, node, ndims)
+            model.set_tensor_layout(o, new_layout)
+    else:
+        if node.op_type == "Transpose":
+            # grab input annotation and switch it around using perm
+            perm = get_by_name(node.attribute, "perm").ints
+            inp_layout = model.get_tensor_layout(node.input[0])
+            out_layout = [inp_layout[i] for i in perm]
+            model.set_tensor_layout(node.output[0], out_layout)
+        elif node.op_type == "Unsqueeze":
+            inp_layout = model.get_tensor_layout(node.input[0])
+            # add dummy dimension at the output
+            out_layout = inp_layout + ["x"]
+            model.set_tensor_layout(node.output[0], out_layout)
+        elif node.op_type == "Squeeze":
+            inp_layout = model.get_tensor_layout(node.input[0])
+            assert inp_layout[-1] == "x"
+            # remove dummy dimension
+            out_layout = inp_layout[:-1]
+            model.set_tensor_layout(node.output[0], out_layout)
+        else:
+            # try to guess based on number of output dims
+            for o in node.output:
+                ndims = len(model.get_tensor_shape(o))
+                model.set_tensor_layout(o, _dims_to_layout(model, node, ndims))
+    # compare old and new output layouts to see if anything changed
+    new_layouts = list(map(lambda x: model.get_tensor_layout(x), node.output))
+    graph_modified = new_layouts != old_layouts
+    return graph_modified
+
+
+class InferDataLayouts(Transformation):
+    """Try to infer data layout annotations info for all input/intermediate/output
+    tensors based on inputs and node type."""
+
+    def apply(self, model):
+        graph = model.graph
+        graph_modified = False
+        # first, make sure that the global input has an annotation
+        # this is hard to do in general, so we fall back to guesswork based on the input shape
+        inp_name = graph.input[0].name
+        if model.get_tensor_layout(inp_name) is None:
+            inp_shape = model.get_tensor_shape(inp_name)
+            if len(inp_shape) == 4:
+                warnings.warn("Assuming 4D input is NCHW")
+                model.set_tensor_layout(inp_name, DataLayout.NCHW)
+                graph_modified = True
+            elif len(inp_shape) == 2:
+                graph_modified = True
+                warnings.warn("Assuming 2D input is NC")
+                model.set_tensor_layout(inp_name, DataLayout.NC)
+            else:
+                raise Exception(
+                    """Unknown number of dims for input, don't know
+                how to annotate"""
+                )
+        for node in graph.node:
+            graph_modified |= _infer_node_data_layout(model, node)
+        return (model, graph_modified)
diff --git a/src/finn/transformation/infer_datatypes.py b/src/finn/transformation/infer_datatypes.py
index 1acd4e3abe2d77248810cf15c15475e806a3bd32..39b7a787be8c725e7b6d474757dd96fc4848dfe0 100644
--- a/src/finn/transformation/infer_datatypes.py
+++ b/src/finn/transformation/infer_datatypes.py
@@ -71,7 +71,13 @@ def _infer_node_datatype(model, node):
         else:
             # unknown, assume node produces float32 outputs
             for o in node.output:
-                model.set_tensor_datatype(o, DataType.FLOAT32)
+                # check if output datatype is already set to a value != FLOAT32
+                odtype = model.get_tensor_datatype(o)
+                if odtype is not None and odtype != DataType.FLOAT32:
+                    # don't change data type
+                    model.set_tensor_datatype(o, odtype)
+                else:
+                    model.set_tensor_datatype(o, DataType.FLOAT32)
     # compare old and new output dtypes to see if anything changed
     new_odtypes = list(map(lambda x: model.get_tensor_datatype(x), node.output))
     graph_modified = new_odtypes != odtypes
diff --git a/src/finn/transformation/lower_convs_to_matmul.py b/src/finn/transformation/lower_convs_to_matmul.py
index 3da785d8dd21b2c6701bffc8ce3869fb14b237a9..e5a1f778d0cac48925ecd97ae8b970f7bdab9c4f 100644
--- a/src/finn/transformation/lower_convs_to_matmul.py
+++ b/src/finn/transformation/lower_convs_to_matmul.py
@@ -26,6 +26,7 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+import numpy as np
 from onnx import TensorProto
 from onnx import helper
 
@@ -54,12 +55,34 @@ class LowerConvsToMatMul(Transformation):
                 k = get_by_name(n.attribute, "kernel_shape").ints[-1]
                 pad = get_by_name(n.attribute, "pads").ints[-1]
                 stride = get_by_name(n.attribute, "strides").ints[-1]
+                group = get_by_name(n.attribute, "group").i
                 weight_name = n.input[1]
                 W_conv = model.get_initializer(weight_name)
-                ifm_ch = W_conv.shape[1]
-                ofm_ch = W_conv.shape[0]
+                ifm_ch = model.get_tensor_shape(n.input[0])[1]  # assume NCHW
+                ofm_ch = model.get_tensor_shape(n.output[0])[1]  # assume NCHW
                 ifm_dim = model.get_tensor_shape(n.input[0])[-1]  # assume NCHW
                 ofm_dim = model.get_tensor_shape(n.output[0])[-1]  # assume NCHW
+
+                # if depthwise conv create sparse matrix and variable "dw"
+                # to store as attribute in Im2Col that indicates that the created
+                # Im2Col node belongs to a depthwise convolution
+                dw = False
+                if group == ifm_ch and ofm_ch == ifm_ch:
+                    W_sparse = np.zeros((ofm_ch, ifm_ch, k, k))
+                    for ch in range(ifm_ch):
+                        W_sparse[ch][ch] = W_conv[ch][0]
+                    W_conv = W_sparse.astype(np.float32)
+                    # record the sparsity pattern of the weight matrix via the
+                    # sparsity annotation of the weight tensor
+                    sparsity = {"dw": {"kernel_shape": k}}
+                    model.set_tensor_sparsity(weight_name, sparsity)
+                    # flag that the Im2Col node created below belongs to a
+                    # depthwise convolution
+                    dw = True
+
                 # reuse conv weights for new matmul weights
                 # conv weights are [OFM][IFM][k][k]
                 # first convert to [OFM][k][k][IFM] (to remain compatible with
@@ -70,6 +93,7 @@ class LowerConvsToMatMul(Transformation):
                 # transpose to get ONNX-compatible [k*k*IFM][OFM] matrix
                 W_matmul = W_matmul.T
                 model.set_initializer(weight_name, W_matmul)
+
                 # create new intermediate values
                 inp_trans_out = helper.make_tensor_value_info(
                     model.make_new_valueinfo_name(),
@@ -80,14 +104,19 @@ class LowerConvsToMatMul(Transformation):
                 inp_trans_out = inp_trans_out.name
                 model.set_tensor_datatype(inp_trans_out, idt)
 
-                im2col_out = helper.make_tensor_value_info(
-                    model.make_new_valueinfo_name(),
-                    TensorProto.FLOAT,
-                    (1, ofm_dim, ofm_dim, ifm_ch * k * k),
-                )
-                graph.value_info.append(im2col_out)
-                im2col_out = im2col_out.name
-                model.set_tensor_datatype(im2col_out, idt)
+                need_im2col = True
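+                # a 1x1 kernel with no padding and unit stride leaves the spatial
+                # layout untouched, so the transposed input can feed the MatMul
+                # directly and the Im2Col node can be skipped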
+                if k == 1 and pad == 0 and stride == 1:
+                    need_im2col = False
+
+                if need_im2col:
+                    im2col_out = helper.make_tensor_value_info(
+                        model.make_new_valueinfo_name(),
+                        TensorProto.FLOAT,
+                        (1, ofm_dim, ofm_dim, ifm_ch * k * k),
+                    )
+                    graph.value_info.append(im2col_out)
+                    im2col_out = im2col_out.name
+                    model.set_tensor_datatype(im2col_out, idt)
 
                 matmul_out = helper.make_tensor_value_info(
                     model.make_new_valueinfo_name(),
@@ -104,19 +133,24 @@ class LowerConvsToMatMul(Transformation):
                     "Transpose", [cnv_input], [inp_trans_out], perm=[0, 2, 3, 1]
                 )
                 # lower input tensor
-                im2col_node = helper.make_node(
-                    "Im2Col",
-                    [inp_trans_out],
-                    [im2col_out],
-                    domain="finn",
-                    stride=stride,
-                    kernel_size=k,
-                    pad_amount=pad,
-                    input_shape="(1,{},{},{})".format(ifm_dim, ifm_dim, ifm_ch),
-                )
+                matmul_input = inp_trans_out
+                if need_im2col:
+                    matmul_input = im2col_out
+                    im2col_node = helper.make_node(
+                        "Im2Col",
+                        [inp_trans_out],
+                        [im2col_out],
+                        domain="finn",
+                        stride=stride,
+                        kernel_size=k,
+                        pad_amount=pad,
+                        input_shape="(1,{},{},{})".format(ifm_dim, ifm_dim, ifm_ch),
+                        depthwise=dw,
+                    )
+
                 # do matmul
                 matmul_node = helper.make_node(
-                    "MatMul", [im2col_out, weight_name], [matmul_out]
+                    "MatMul", [matmul_input, weight_name], [matmul_out]
                 )
                 # NHWC -> NCHW
                 out_trans_node = helper.make_node(
@@ -124,9 +158,13 @@ class LowerConvsToMatMul(Transformation):
                 )
                 # insert nodes where the conv is to preserve topological ordering
                 graph.node.insert(node_ind, inp_trans_node)
-                graph.node.insert(node_ind + 1, im2col_node)
-                graph.node.insert(node_ind + 2, matmul_node)
-                graph.node.insert(node_ind + 3, out_trans_node)
+                if need_im2col:
+                    graph.node.insert(node_ind + 1, im2col_node)
+                    graph.node.insert(node_ind + 2, matmul_node)
+                    graph.node.insert(node_ind + 3, out_trans_node)
+                else:
+                    graph.node.insert(node_ind + 1, matmul_node)
+                    graph.node.insert(node_ind + 2, out_trans_node)
                 # remove old nodes
                 graph.node.remove(n)
         model = model.transform(InferShapes())
diff --git a/src/finn/transformation/merge_onnx_models.py b/src/finn/transformation/merge_onnx_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a84910832fc55b9383b2d731c05c53f42368631
--- /dev/null
+++ b/src/finn/transformation/merge_onnx_models.py
@@ -0,0 +1,164 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import copy
+import warnings
+from onnx import helper
+
+from finn.transformation import Transformation
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.general import (
+    GiveReadableTensorNames,
+    GiveRandomTensorNames,
+    GiveUniqueNodeNames,
+    GiveUniqueParameterTensors,
+)
+
+
+class MergeONNXModels(Transformation):
+    """Merges two models. The model passed in the transformation will be inserted before
+    the model the transformation is applied on, the resulting model is returned.
+    This transformation will try to connect graph.output[0] of the pre model and
+    graph.input[0] of the post model.
+    If more than one input or output exists, a warning is raised."""
+
+    def __init__(self, pre_model):
+        super().__init__()
+        # use deep copy of model that should be inserted in the beginning of
+        # the other model to ensure that it stays unchanged
+        self.pre_model = copy.deepcopy(pre_model)
+
+    def apply(self, model):
+        graph_modified = False
+        pre_model = self.pre_model
+        post_model = copy.deepcopy(model)
+        # to avoid mix-ups, start by giving all tensors random names
+        pre_model = pre_model.transform(GiveRandomTensorNames())
+        post_model = post_model.transform(GiveRandomTensorNames())
+
+        # check for dynamic outputs of pre model
+        dyn_outp = []
+        for outp in pre_model.graph.output:
+            init_val = pre_model.get_initializer(outp.name)
+            if init_val is None:
+                dyn_outp.append(outp)
+
+        if len(dyn_outp) != 1:
+            warnings.warn(
+                "The pre model has more than one dynamic output! The transformation "
+                "tries to connect the first dynamic output to the first dynamic input "
+                "of the post model."
+            )
+
+        # check for dynamic inputs of post model
+        dyn_inp = []
+        for inp in post_model.graph.input:
+            init_val = post_model.get_initializer(inp.name)
+            if init_val is None:
+                dyn_inp.append(inp)
+
+        if len(dyn_inp) != 1:
+            warnings.warn(
+                "The post model has more than one dynamic input! The transformation "
+                "tries to connect the first dynamic input to the first dynamic output "
+                "of the pre model."
+            )
+
+        # erase all node names to avoid conflict
+        for n in pre_model.graph.node:
+            n.name = ""
+        for n in post_model.graph.node:
+            n.name = ""
+
+        # check if models can be merged
+        output_model_a = dyn_outp[0].name
+        input_model_b = dyn_inp[0].name
+        output_a_shape = pre_model.get_tensor_shape(output_model_a)
+        input_b_shape = post_model.get_tensor_shape(input_model_b)
+        assert (
+            output_a_shape == input_b_shape
+        ), "Models can't be merged! Shapes don't match."
+
+        # connect output of one model to input of the other
+        for n in pre_model.graph.node:
+            if output_model_a == n.output[0]:
+                n.output[0] = input_model_b
+
+        # extract information for new model
+
+        # nodes
+        node_pre = [node for node in pre_model.graph.node]
+        node_post = [node for node in post_model.graph.node]
+        node_new = node_pre + node_post
+
+        # in and output
+        inp = pre_model.graph.input[0]
+        outp = post_model.graph.output[0]
+
+        vi_pre = [x for x in pre_model.graph.value_info]
+        out_pre = [x for x in pre_model.graph.output]
+        qa_pre = [x for x in pre_model.graph.quantization_annotation]
+        init_pre = [x for x in pre_model.graph.initializer]
+
+        vi_post = [x for x in post_model.graph.value_info]
+        qa_post = [x for x in post_model.graph.quantization_annotation]
+        init_post = [x for x in post_model.graph.initializer]
+
+        vi_new = vi_pre + vi_post + out_pre
+        qa_new = qa_pre + qa_post
+        init_new = init_pre + init_post
+
+        # create new graph and model
+        new_graph = helper.make_graph(
+            nodes=node_new,
+            name="fuse-graph",
+            inputs=[inp],
+            outputs=[outp],
+            value_info=vi_new,
+        )
+
+        new_model = helper.make_model(new_graph, producer_name="fuse_model")
+        new_model = ModelWrapper(new_model)
+
+        for i in init_new:
+            new_model.graph.initializer.append(i)
+        for qa in qa_new:
+            new_model.graph.quantization_annotation.append(qa)
+
+        # tidy-up new model
+        model = new_model
+        model = model.transform(InferShapes())
+        model = model.transform(InferDataTypes())
+        model = model.transform(InferDataLayouts())
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(GiveUniqueParameterTensors())
+        model = model.transform(GiveReadableTensorNames())
+
+        return (model, graph_modified)
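A minimal usage sketch for the new transformation (file names are placeholders; typically the pre model is a small preprocessing graph such as a ToTensor/normalization network):

```python
from finn.core.modelwrapper import ModelWrapper
from finn.transformation.merge_onnx_models import MergeONNXModels

pre_model = ModelWrapper("preproc.onnx")   # graph to prepend (placeholder name)
model = ModelWrapper("network.onnx")       # graph the transform is applied on
# the pre model's single dynamic output is wired to the model's single dynamic input
model = model.transform(MergeONNXModels(pre_model))
model.save("network_with_preproc.onnx")
```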
diff --git a/src/finn/transformation/move_reshape.py b/src/finn/transformation/move_reshape.py
index 6a30fd93cc0bdc322b6ec7d892d42d3c3ca96fd6..9943d371dad79a977b61810bcddafdcba505d6cc 100644
--- a/src/finn/transformation/move_reshape.py
+++ b/src/finn/transformation/move_reshape.py
@@ -17,7 +17,7 @@ def _is_fpgadataflow_node(node):
         return False
 
 
-class MoveReshape(Transformation):
+class RemoveCNVtoFCFlatten(Transformation):
     """Removes a node that implements a (1, -1) reshape if it is
     between two fpgadataflow nodes"""
 
@@ -27,14 +27,24 @@ class MoveReshape(Transformation):
         graph_modified = False
         for n in graph.node:
             if n.op_type == "Reshape":
-                graph_modified = True
                 shape = model.get_initializer(n.input[1])
                 if (shape == [1, -1]).all():
                     producer = model.find_producer(n.input[0])
                     if _is_fpgadataflow_node(producer) is True:
                         consumer = model.find_consumer(n.output[0])
                         if _is_fpgadataflow_node(consumer) is True:
+                            graph_modified = True
                             consumer.input[0] = n.input[0]
                             graph.node.remove(n)
+                    elif producer.op_type == "Transpose":
+                        transp_node = producer
+                        producer = model.find_producer(transp_node.input[0])
+                        if _is_fpgadataflow_node(producer) is True:
+                            consumer = model.find_consumer(n.output[0])
+                            if _is_fpgadataflow_node(consumer) is True:
+                                graph_modified = True
+                                consumer.input[0] = transp_node.input[0]
+                                graph.node.remove(n)
+                                graph.node.remove(transp_node)
 
         return (model, graph_modified)
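Usage stays the same apart from the new name; a sketch, assuming `model` is a ModelWrapper whose surrounding nodes have already been converted to fpgadataflow nodes:

```python
from finn.transformation.move_reshape import RemoveCNVtoFCFlatten

# removes a (1, -1) Reshape, and now also a preceding Transpose, when the
# flatten sits between two fpgadataflow nodes
model = model.transform(RemoveCNVtoFCFlatten())
```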
diff --git a/src/finn/transformation/streamline/__init__.py b/src/finn/transformation/streamline/__init__.py
index c9c73fa4c8303ee28bc1cc6aee879d633740e01e..d7686eaadcbc800542ab96c5f45145857412b773 100644
--- a/src/finn/transformation/streamline/__init__.py
+++ b/src/finn/transformation/streamline/__init__.py
@@ -41,6 +41,7 @@ from finn.transformation.streamline.absorb import (
     FactorOutMulSignMagnitude,
     Absorb1BitMulIntoMatMul,
     Absorb1BitMulIntoConv,
+    AbsorbSignBiasIntoMultiThreshold,
 )
 
 from finn.transformation.streamline.collapse_repeated import (
@@ -52,13 +53,14 @@ from finn.transformation.streamline.reorder import (
     MoveAddPastMul,
     MoveScalarMulPastMatMul,
     MoveScalarAddPastMatMul,
-    MoveScalarAddPastConv,
+    MoveAddPastConv,
     MoveScalarMulPastConv,
 )
 
 from finn.transformation.streamline.round_thresholds import RoundAndClipThresholds
 from finn.transformation.streamline.sign_to_thres import ConvertSignToThres
 from finn.transformation.batchnorm_to_affine import BatchNormToAffine
+from finn.transformation.streamline.remove import RemoveIdentityOps
 
 
 class Streamline(Transformation):
@@ -70,9 +72,10 @@ class Streamline(Transformation):
             ConvertDivToMul(),
             BatchNormToAffine(),
             ConvertSignToThres(),
+            AbsorbSignBiasIntoMultiThreshold(),
             MoveAddPastMul(),
             MoveScalarAddPastMatMul(),
-            MoveScalarAddPastConv(),
+            MoveAddPastConv(),
             MoveScalarMulPastMatMul(),
             MoveScalarMulPastConv(),
             MoveAddPastMul(),
@@ -87,6 +90,7 @@ class Streamline(Transformation):
         ]
         for trn in streamline_transformations:
             model = model.transform(trn)
+            model = model.transform(RemoveIdentityOps())
             model = model.transform(GiveUniqueNodeNames())
             model = model.transform(GiveReadableTensorNames())
             model = model.transform(InferDataTypes())
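With these changes, identity ops are pruned after every streamlining step. A usage sketch (assuming `model` is a ModelWrapper of a streamlinable network):

```python
from finn.transformation.streamline import Streamline
from finn.transformation.streamline.remove import RemoveIdentityOps

model = model.transform(Streamline())         # now runs RemoveIdentityOps per step
model = model.transform(RemoveIdentityOps())  # can also be applied standalone
```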
diff --git a/src/finn/transformation/streamline/absorb.py b/src/finn/transformation/streamline/absorb.py
index 0d709297a9132b15b51435b7ab4b51ce55c7e9f3..0f2c5525d91263b44002677b505087d38408333a 100644
--- a/src/finn/transformation/streamline/absorb.py
+++ b/src/finn/transformation/streamline/absorb.py
@@ -28,14 +28,81 @@
 
 import numpy as np
 from onnx import helper as oh
+import warnings
 
 from finn.core.datatype import DataType
+import finn.core.data_layout as DataLayout
 from finn.transformation import Transformation
 from finn.util.basic import get_by_name
 from finn.custom_op.registry import getCustomOp
+from finn.transformation.infer_shapes import InferShapes
 from finn.transformation.infer_datatypes import InferDataTypes
 
 
+class AbsorbSignBiasIntoMultiThreshold(Transformation):
+    """Absorb scalar bias originating from signed int export back into
+    MultiThreshold and re-evaluate the output datatype."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            # search for (MultiThreshold, Add) pair
+            node_ind += 1
+            if (
+                n.op_type == "MultiThreshold"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
+                consumer = model.find_consumer(n.output[0])
+                if consumer is not None and consumer.op_type == "Add":
+                    mt_node = n
+                    add_node = consumer
+                    threshold_name = mt_node.input[1]
+                    add_weight_name = add_node.input[1]
+                    T = model.get_initializer(threshold_name)
+                    A = model.get_initializer(add_weight_name)
+                    if (A is None) or (T is None):
+                        warnings.warn("Threshold or add bias not constant, skipping")
+                        continue
+                    end_name = add_node.output[0]
+                    # we can only absorb scalar adds
+                    is_scalar = A.ndim == 0 or all(x == 1 for x in A.shape)
+                    if not is_scalar:
+                        continue
+                    bias = A.flatten()[0]
+                    # set MultiThreshold bias property
+                    mt_inst = getCustomOp(mt_node)
+                    bias += mt_inst.get_nodeattr("out_bias")
+                    mt_inst.set_nodeattr("out_bias", bias)
+                    graph_modified = True
+                    # compute new DataType for MultiThreshold output
+                    steps = T.shape[-1]
+                    new_min = bias
+                    new_max = steps + bias
+                    odt = DataType.get_smallest_possible(steps).name.replace(
+                        "UINT", "INT"
+                    )
+                    odt = DataType[odt]
+                    assert odt.allowed(new_max) and odt.allowed(
+                        new_min
+                    ), """Could
+                    not compute new MultiThreshold DataType (min = %d max = %d)""" % (
+                        new_min,
+                        new_max,
+                    )
+                    mt_inst.set_nodeattr("out_dtype", odt.name)
+                    # remove Add node, rewire MultiThreshold
+                    graph.node.remove(add_node)
+                    mt_node.output[0] = end_name
+                    # set datatype
+                    model.set_tensor_datatype(end_name, odt)
+        if graph_modified:
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
+
+
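To make the out_bias/datatype bookkeeping above concrete, a small worked example (numbers chosen purely for illustration):

```python
# a MultiThreshold with 3 thresholds (steps = T.shape[-1] = 3) and out_bias = 0
# produces values in {0, 1, 2, 3}; a signed export then appends Add(-2)
steps = 3
bias = -2 + 0                           # Add constant plus existing out_bias
new_min, new_max = bias, steps + bias   # -2 and 1
# DataType.get_smallest_possible(3) is UINT2; replacing "UINT" with "INT" gives
# INT2, whose range [-2, 1] covers new_min..new_max, so the Add node is removed
# and the MultiThreshold gets out_bias = -2 and out_dtype = INT2
```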
 class AbsorbAddIntoMultiThreshold(Transformation):
     """Absorb preceding Add ops into MultiThreshold by updating the threshold
     values. Only scalar/1D add vectors can be absorbed."""
@@ -46,7 +113,11 @@ class AbsorbAddIntoMultiThreshold(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "Add":
+            if (
+                n.op_type == "Add"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
                 consumer = model.find_consumer(n.output[0])
                 if consumer is not None and consumer.op_type == "MultiThreshold":
                     add_weight_name = n.input[1]
@@ -83,7 +154,11 @@ class AbsorbMulIntoMultiThreshold(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "Mul":
+            if (
+                n.op_type == "Mul"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
                 mul_weight_name = n.input[1]
                 A = model.get_initializer(mul_weight_name)
                 assert A is not None, "Initializer for mul weights is not set."
@@ -242,11 +317,13 @@ class AbsorbTransposeIntoMultiThreshold(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "Transpose":
+            if n.op_type == "Transpose" and not model.is_fork_node(n):
                 perms = list(get_by_name(n.attribute, "perm").ints)
                 if perms == [0, 3, 1, 2]:
                     mt_cand = model.find_consumer(n.output[0])
-                    if mt_cand.op_type == "MultiThreshold":
+                    if mt_cand.op_type == "MultiThreshold" and not model.is_fork_node(
+                        mt_cand
+                    ):
                         final_t_cand = model.find_consumer(mt_cand.output[0])
                         if final_t_cand.op_type == "Transpose":
                             perms = list(
@@ -282,3 +359,186 @@ class AbsorbTransposeIntoMultiThreshold(Transformation):
         if graph_modified:
             model = model.transform(InferDataTypes())
         return (model, graph_modified)
+
+
+class AbsorbTransposeIntoFlatten(Transformation):
+    """Absorb transpose node into succeeding flatten node, if H=W=1 and the first
+    dimension stays the same. Can also be applied if flatten is implemented implicitly
+    by a reshape node with shape [1, -1] and the first input dimension is 1"""
+
+    def apply(self, model):
+        graph = model.graph
+        graph_modified = False
+        node_ind = 0
+        for n in graph.node:
+            node_ind += 1
+            if (
+                n.op_type == "Reshape"
+                and (model.get_initializer(n.input[1]) == [1, -1]).all()
+            ) or n.op_type == "Flatten":
+                prod = model.find_producer(n.input[0])
+                if (
+                    prod is not None
+                    and prod.op_type == "Transpose"
+                    # we ensure that the first dimension is not changed from the
+                    # transpose operation
+                    and get_by_name(prod.attribute, "perm").ints[0] == 0
+                ):
+                    data_layout = model.get_tensor_layout(prod.input[0])
+                    # check for the data layout to interpret input shape correctly
+                    if data_layout is None:
+                        warnings.warn(
+                            """Data layout for input tensor of Transpose node is not set.
+                                To use AbsorbTransposeIntoFlatten transformation
+                                please set tensor data layout."""
+                        )
+                        continue
+                    elif data_layout == DataLayout.NCHW:
+                        (b, c, h, w) = model.get_tensor_shape(prod.input[0])
+                        # if h=w=1 the transposition can be absorbed, otherwise
+                        # the absorption would lead to an error in the behavior
+                        if h != 1 or w != 1:
+                            continue
+                        # the ONNX Flatten node keeps the first dim by default
+                        # and flattens the rest, so this transformation can only
+                        # handle b != 1 if the model already contains a Flatten
+                        # node rather than a Reshape node with shape = [1, -1].
+                        # If the first dim of the input tensor is not 1, Flatten
+                        # and Reshape (with shape = [1, -1]) give different results
+                        if n.op_type == "Reshape" and b != 1:
+                            continue
+                    elif data_layout == DataLayout.NHWC:
+                        (b, h, w, c) = model.get_tensor_shape(prod.input[0])
+                        if h != 1 or w != 1:
+                            continue
+                        if n.op_type == "Reshape" and b != 1:
+                            continue
+                    # create single flatten node and remove obsolete nodes
+                    node = oh.make_node("Flatten", [prod.input[0]], [n.output[0]])
+                    graph.node.remove(n)
+                    graph.node.remove(prod)
+                    graph.node.insert(node_ind, node)
+                    graph_modified = True
+        if graph_modified:
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
+
+
+class AbsorbScalarMulAddIntoTopK(Transformation):
+    """Remove mul/add node prior to topk node if the op is scalar. Note that
+    the TopK output probabilities will change, but the indices won't."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            node_ind += 1
+            if n.op_type == "TopK":
+                prod = model.find_producer(n.input[0])
+                if prod is not None and (prod.op_type in ["Mul", "Add"]):
+                    prod_input = prod.input[0]
+                    param_name = prod.input[1]
+                    A = model.get_initializer(param_name)
+                    if A is None:
+                        warnings.warn("Param is not constant, skipping")
+                        continue
+                    is_scalar = all(x == 1 for x in A.shape)
+                    is_scalar_pos_mul = is_scalar and (prod.op_type == "Mul") and A > 0
+                    is_scalar_add = is_scalar and (prod.op_type == "Add")
+                    if is_scalar_pos_mul or is_scalar_add:
+                        # a positive scalar Mul or a scalar Add can simply be
+                        # deleted and the TopK node rewired: TopK only depends
+                        # on the relative ordering of its inputs, and that
+                        # ordering is preserved when every value is multiplied
+                        # by a positive scalar or shifted by the same constant
+                        graph.node.remove(prod)
+                        n.input[0] = prod_input
+                        # to avoid errors the datatype is set to float32
+                        model.set_tensor_datatype(n.input[0], DataType.FLOAT32)
+                        graph_modified = True
+        if graph_modified:
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
+
+
+class AbsorbConsecutiveTransposes(Transformation):
+    """Remove (Transpose -> Transpose) patterns when the input and output
+    of the pattern have the same layout."""
+
+    def Are_opposite_permutations(self, perms1, perms2):
+        if len(perms1) != len(perms2):
+            return False
+        assert 0 <= max(perms2) < len(perms2), "invalid permutation"
+        assert 0 <= max(perms1) < len(perms1), "invalid permutation"
+
+        for i, p in enumerate(perms2):
+            if perms1[p] != i:
+                return False
+
+        return True
+
+    def apply(self, model):
+        graph = model.graph
+        graph_modified = False
+        for n in graph.node:
+            if n.op_type == "Transpose":
+                if model.is_fork_node(n):
+                    next_nodes = model.find_direct_successors(n)
+                    perms1 = list(get_by_name(n.attribute, "perm").ints)
+
+                    # check if all nodes after fork are opposite transposes
+                    all_opposite_transposes = True
+                    for next_node in next_nodes:
+                        if next_node is not None and next_node.op_type == "Transpose":
+                            perms2 = list(get_by_name(next_node.attribute, "perm").ints)
+                            if not self.Are_opposite_permutations(perms1, perms2):
+                                all_opposite_transposes = False
+                                break
+                        else:
+                            all_opposite_transposes = False
+                            break
+
+                    if not all_opposite_transposes:
+                        continue
+
+                    prod = model.find_producer(n.input[0])
+                    for next_node in next_nodes:
+                        # connect next_node's consumer input to n's producer output
+                        # TODO implement this to allow for forks as producers and
+                        # joins as consumers
+                        cons = model.find_consumer(next_node.output[0])
+                        cons.input[0] = prod.output[0]
+
+                        # remove consumer transpose
+                        graph.node.remove(next_node)
+
+                    # remove producer transpose
+                    graph.node.remove(n)
+                    graph_modified = True
+
+                else:
+                    next_node = model.find_consumer(n.output[0])
+                    if next_node is not None and next_node.op_type == "Transpose":
+                        perms1 = list(get_by_name(n.attribute, "perm").ints)
+                        perms2 = list(get_by_name(next_node.attribute, "perm").ints)
+                        if self.Are_opposite_permutations(perms1, perms2):
+
+                            # connect next_node's consumer input to n's producer output
+                            # TODO implement this to allow for forks as producers
+                            consumers = model.find_direct_successors(next_node)
+                            prod = model.find_producer(n.input[0])
+                            for cons in consumers:
+                                for cons_in in cons.input:
+                                    if cons_in == next_node.output[0]:
+                                        prod.output[0] = cons_in
+                                        break
+                            # remove both transposes
+                            graph.node.remove(n)
+                            graph.node.remove(next_node)
+
+                            graph_modified = True
+        if graph_modified:
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
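The opposite-permutation check above amounts to verifying that one perm is the inverse of the other; a quick standalone illustration:

```python
# NCHW -> NHWC is perm [0, 2, 3, 1]; NHWC -> NCHW is perm [0, 3, 1, 2].
# They are inverse ("opposite") permutations, so the Transpose pair cancels.
perms1 = [0, 2, 3, 1]
perms2 = [0, 3, 1, 2]
assert all(perms1[p] == i for i, p in enumerate(perms2))
# two identical [0, 2, 3, 1] transposes fail this check and are left in place
```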
diff --git a/src/finn/transformation/streamline/collapse_repeated.py b/src/finn/transformation/streamline/collapse_repeated.py
index aa059747b602bc6b659bc8b53b1f18988bba1ef0..769bed841ce07c1c9c62f762de4b2c0937a6d68f 100644
--- a/src/finn/transformation/streamline/collapse_repeated.py
+++ b/src/finn/transformation/streamline/collapse_repeated.py
@@ -30,6 +30,7 @@ from onnx import helper as oh
 
 from finn.transformation import Transformation
 from finn.transformation.infer_shapes import InferShapes
+from finn.core.datatype import DataType
 
 
 class CollapseRepeatedOp(Transformation):
@@ -48,9 +49,17 @@ class CollapseRepeatedOp(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == self.op_name:
+            if (
+                n.op_type == self.op_name
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
                 consumer = model.find_consumer(n.output[0])
-                if consumer is not None and consumer.op_type == self.op_name:
+                if (
+                    consumer is not None
+                    and consumer.op_type == self.op_name
+                    and not model.is_join_node(consumer)
+                ):
                     op0_param_name = n.input[1]
                     op1_param_name = consumer.input[1]
                     op0_param = model.get_initializer(op0_param_name)
@@ -75,6 +84,9 @@ class CollapseRepeatedOp(Transformation):
                     graph.node.insert(node_ind, new_node)
                     # replace parameter value
                     model.set_initializer(new_node_param_name, new_param)
+                    # be conservative with param/output DataTypes
+                    model.set_tensor_datatype(new_node_param_name, DataType.FLOAT32)
+                    model.set_tensor_datatype(end_name, DataType.FLOAT32)
                     # remove old nodes
                     graph.node.remove(n)
                     graph.node.remove(consumer)
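The reason two back-to-back ops of the same kind can be merged, and why the new FLOAT32 annotation is the safe choice, in a small numpy illustration (not part of the patch):

```python
import numpy as np

x = np.random.randn(1, 4).astype(np.float32)
a, b = np.float32(0.5), np.float32(3.0)
# consecutive Muls collapse into one Mul with the product as parameter
assert np.allclose((x * a) * b, x * (a * b))
# consecutive Adds collapse into one Add with the sum as parameter
assert np.allclose((x + a) + b, x + (a + b))
# the collapsed parameter/output can need a wider type than either original
# (e.g. the product of two INT2 params), hence the conservative FLOAT32 above
```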
diff --git a/src/finn/transformation/streamline/remove.py b/src/finn/transformation/streamline/remove.py
new file mode 100644
index 0000000000000000000000000000000000000000..ddc4233ddafbc70c4d20d316ea72ea6bba1b82a8
--- /dev/null
+++ b/src/finn/transformation/streamline/remove.py
@@ -0,0 +1,69 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+from finn.transformation import Transformation
+from finn.transformation.infer_shapes import InferShapes
+import numpy as np
+
+class RemoveIdentityOps(Transformation):
+    """Remove identity ops like Add/Sub with zero or Mul/Div with one"""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            node_ind += 1
+            if (
+                n.op_type in ["Add", "Sub"]
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
+                A = model.get_initializer(n.input[1])
+                if A is not None and (A == np.zeros_like(A)).all():
+                    producer = model.find_producer(n.input[0])
+                    # remove node and wire output tensor to
+                    # output of producer node
+                    producer.output[0] = n.output[0]
+                    graph.node.remove(n)
+                    graph_modified = True
+
+            elif (
+                n.op_type in ["Mul", "Div"]
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
+                A = model.get_initializer(n.input[1])
+                if A is not None and (A == np.ones_like(A)).all():
+                    producer = model.find_producer(n.input[0])
+                    # remove node and wire output tensor to
+                    # output of producer node
+                    producer.output[0] = n.output[0]
+                    graph.node.remove(n)
+                    graph_modified = True
+        model = model.transform(InferShapes())
+        return (model, graph_modified)
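A minimal usage sketch (assuming `model` is a ModelWrapper):

```python
from finn.transformation.streamline.remove import RemoveIdentityOps

# prunes Add/Sub with an all-zero constant and Mul/Div with an all-one constant
# on linear (non-fork, non-join) segments of the graph
model = model.transform(RemoveIdentityOps())
```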
diff --git a/src/finn/transformation/streamline/reorder.py b/src/finn/transformation/streamline/reorder.py
index 1fd9ce5108bbe9c317f180680febfc088072b98c..f4c1dc1306b67e5807c25cfb08c961729dbfbdf6 100644
--- a/src/finn/transformation/streamline/reorder.py
+++ b/src/finn/transformation/streamline/reorder.py
@@ -27,17 +27,25 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import numpy as np
+import warnings
 from onnx import helper as oh
+from onnx import TensorProto
 
 from finn.transformation import Transformation
+import finn.core.data_layout as DataLayout
 from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.core.datatype import DataType
 from finn.core.onnx_exec import execute_node
 from finn.util.basic import get_by_name
+from finn.custom_op.registry import getCustomOp
 
 
 class MoveAddPastMul(Transformation):
-    """Move add operations past multiply operations. The aim is to have them
-    next to each other such that they can be collapsed into a single add."""
+    """Move add operations past multiply operations on linear segments of the graph.
+    The aim is to have them next to each other such that they can be collapsed into
+    a single add."""
 
     def apply(self, model):
         graph = model.graph
@@ -45,9 +53,17 @@ class MoveAddPastMul(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "Add":
+            if (
+                n.op_type == "Add"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
                 consumer = model.find_consumer(n.output[0])
-                if consumer is not None and consumer.op_type == "Mul":
+                if (
+                    consumer is not None
+                    and consumer.op_type == "Mul"
+                    and not model.is_join_node(consumer)
+                ):
                     # have: (x) -> add(,B) -> (x+B) -> mul(,A) -> (xA+BA)
                     # want: (x) -> mul(,A) -> (xA) -> add(,BA) -> (xA+BA)
                     # assume input 0 is from the previous layer, input 1 is the
@@ -56,19 +72,26 @@ class MoveAddPastMul(Transformation):
                     add_weight_name = n.input[1]
                     A = model.get_initializer(mul_weight_name)
                     B = model.get_initializer(add_weight_name)
-                    assert A is not None, "Initializer for mul weights is not set."
-                    assert B is not None, "Initializer for add weights is not set."
+                    if (A is None) or (B is None):
+                        warnings.warn(
+                            "Mul or add does not have constant params, skipping"
+                        )
+                        continue
                     start_name = n.input[0]
                     middle_name = n.output[0]
                     end_name = consumer.output[0]
                     # compute new param value for add
                     BA = B * A
+
                     # make and insert new nodes
                     new_mul = oh.make_node(
-                        "Mul", [start_name, mul_weight_name], [middle_name]
+                        "Mul",
+                        [start_name, mul_weight_name],
+                        [middle_name],
+                        name=consumer.name,
                     )
                     new_add = oh.make_node(
-                        "Add", [middle_name, add_weight_name], [end_name]
+                        "Add", [middle_name, add_weight_name], [end_name], name=n.name
                     )
                     graph.node.insert(node_ind, new_mul)
                     graph.node.insert(node_ind + 1, new_add)
@@ -78,6 +101,7 @@ class MoveAddPastMul(Transformation):
                     graph.node.remove(n)
                     graph.node.remove(consumer)
                     graph_modified = True
+
         model = model.transform(InferShapes())
         return (model, graph_modified)
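The rewrite relies on the identity spelled out in the comment above; a quick numpy check, purely illustrative:

```python
import numpy as np

x = np.random.randn(1, 8).astype(np.float32)
A = np.float32(0.5)   # Mul parameter
B = np.float32(3.0)   # Add parameter
# (x + B) * A  ==  x * A + (B * A), which is what the reordered graph computes
assert np.allclose((x + B) * A, x * A + (B * A))
```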
 
@@ -92,15 +116,24 @@ class MoveScalarMulPastMatMul(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "Mul":
+            if (
+                n.op_type == "Mul"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
                 consumer = model.find_consumer(n.output[0])
-                if consumer is not None and consumer.op_type == "MatMul":
+                if (
+                    consumer is not None
+                    and consumer.op_type == "MatMul"
+                    and not model.is_join_node(consumer)
+                ):
                     mul_weight_name = n.input[1]
                     matmul_weight_name = consumer.input[1]
                     A = model.get_initializer(mul_weight_name)
                     W = model.get_initializer(matmul_weight_name)
-                    assert A is not None, "Initializer for mul weights is not set."
-                    assert W is not None, "Initializer for matmul weights is not set."
+                    if (A is None) or (W is None):
+                        warnings.warn("MatMul or Mul params are not constant, skipping")
+                        continue
                     start_name = n.input[0]
                     middle_name = n.output[0]
                     end_name = consumer.output[0]
@@ -109,10 +142,16 @@ class MoveScalarMulPastMatMul(Transformation):
                         # if the mul is scalar, we can simply swap the order of ops
                         # make and insert new nodes
                         new_matmul = oh.make_node(
-                            "MatMul", [start_name, matmul_weight_name], [middle_name]
+                            "MatMul",
+                            [start_name, matmul_weight_name],
+                            [middle_name],
+                            name=consumer.name,
                         )
                         new_mul = oh.make_node(
-                            "Mul", [middle_name, mul_weight_name], [end_name]
+                            "Mul",
+                            [middle_name, mul_weight_name],
+                            [end_name],
+                            name=n.name,
                         )
                         graph.node.insert(node_ind, new_matmul)
                         graph.node.insert(node_ind + 1, new_mul)
@@ -135,15 +174,24 @@ class MoveScalarAddPastMatMul(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "Add":
+            if (
+                n.op_type == "Add"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
                 consumer = model.find_consumer(n.output[0])
-                if consumer is not None and consumer.op_type == "MatMul":
+                if (
+                    consumer is not None
+                    and consumer.op_type == "MatMul"
+                    and not model.is_join_node(consumer)
+                ):
                     add_weight_name = n.input[1]
                     matmul_weight_name = consumer.input[1]
                     A = model.get_initializer(add_weight_name)
                     W = model.get_initializer(matmul_weight_name)
-                    assert A is not None, "Initializer for add weights is not set."
-                    assert W is not None, "Initializer for matmul weights is not set."
+                    if (A is None) or (W is None):
+                        warnings.warn("MatMul or Add params are not constant, skipping")
+                        continue
                     start_name = n.input[0]
                     middle_name = n.output[0]
                     end_name = consumer.output[0]
@@ -155,10 +203,16 @@ class MoveScalarAddPastMatMul(Transformation):
                         # update the add weight
                         model.set_initializer(add_weight_name, Anew)
                         new_matmul = oh.make_node(
-                            "MatMul", [start_name, matmul_weight_name], [middle_name]
+                            "MatMul",
+                            [start_name, matmul_weight_name],
+                            [middle_name],
+                            name=consumer.name,
                         )
                         new_add = oh.make_node(
-                            "Add", [middle_name, add_weight_name], [end_name]
+                            "Add",
+                            [middle_name, add_weight_name],
+                            [end_name],
+                            name=n.name,
                         )
                         graph.node.insert(node_ind, new_matmul)
                         graph.node.insert(node_ind + 1, new_add)
@@ -171,8 +225,8 @@ class MoveScalarAddPastMatMul(Transformation):
         return (model, graph_modified)
 
 
-class MoveScalarAddPastConv(Transformation):
-    """Move scalar add operations past conv operations. We want to have adds
+class MoveAddPastConv(Transformation):
+    """Move scalar and channelwise add operations past conv operations. We want to have adds
     next to each other such that they can be collapsed into a single add."""
 
     def apply(self, model):
@@ -181,24 +235,47 @@ class MoveScalarAddPastConv(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "Add":
+            if (
+                n.op_type == "Add"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
                 consumer = model.find_consumer(n.output[0])
-                if consumer is not None and consumer.op_type == "Conv":
+                if (
+                    consumer is not None
+                    and consumer.op_type == "Conv"
+                    and not model.is_join_node(consumer)
+                ):
                     conv_node = consumer
                     add_node = n
                     add_weight_name = n.input[1]
                     conv_in_name = consumer.input[0]
                     conv_in_shape = model.get_tensor_shape(conv_in_name)
+                    # assume datalayout to be NCHW
+                    channels = conv_in_shape[1]
                     A = model.get_initializer(add_weight_name)
-                    assert A is not None, "Initializer for add weights is not set."
+                    if A is None:
+                        warnings.warn("Add param is not constant, skipping")
+                        continue
                     start_name = n.input[0]
                     end_name = consumer.output[0]
                     conv_out_shape = model.get_tensor_shape(end_name)
-                    if all(x == 1 for x in A.shape):
+
+                    using_padding = True
+                    pads = list(get_by_name(consumer.attribute, "pads").ints)
+                    if sum(pads) == 0:
+                        using_padding = False
+                    if (
+                        all(x == 1 for x in A.shape) or A.shape == (1, channels, 1, 1)
+                    ) and not using_padding:
                         # create a tensor filled with the add constant, in
                         # the shape expected by the convolution
                         conv_in_const = np.zeros(conv_in_shape, dtype=np.float32)
-                        conv_in_const.fill(A.item())
+                        if A.shape == (1, channels, 1, 1):
+                            for ch in range(channels):
+                                conv_in_const[0][ch].fill(A[0][ch].item())
+                        else:
+                            conv_in_const.fill(A.item())
                         # create an execution context and put in const input
                         exec_ctx = model.make_empty_exec_context()
                         exec_ctx[conv_in_name] = conv_in_const
@@ -206,7 +283,8 @@ class MoveScalarAddPastConv(Transformation):
                         execute_node(conv_node, exec_ctx, model.graph)
                         # retrieve the conv output
                         Anew = exec_ctx[end_name]
-                        # strip out repetition
+
+                        # strip out repetition if no padding
                         Anew = Anew[0, :, 0, 0].reshape(1, -1, 1, 1)
                         # update the add weight
                         model.set_initializer(add_weight_name, Anew)
@@ -224,6 +302,7 @@ class MoveScalarAddPastConv(Transformation):
                         graph.node.remove(add_node)
                         graph.node.insert(node_ind, add_node)
                         graph_modified = True
+
         model = model.transform(InferShapes())
         return (model, graph_modified)
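The new Add parameter is obtained by running the Conv on a constant input filled with the old add value; for a bias-free 1x1 conv this reduces to a small matrix-vector product. A hedged numpy sketch of that special case (shapes are just an example):

```python
import numpy as np

W = np.random.randn(5, 4, 1, 1).astype(np.float32)   # Conv weights (OIHW)
b = np.random.randn(1, 4, 1, 1).astype(np.float32)   # channelwise add parameter
# new add parameter after moving the Add past a bias-free 1x1 Conv:
b_new = (W[:, :, 0, 0] @ b[0, :, 0, 0]).reshape(1, -1, 1, 1)
# with nonzero padding this spatially-uniform parameter no longer exists, since
# the padded border would not have received the add, so the transform skips it
```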
 
@@ -238,12 +317,22 @@ class MoveScalarMulPastConv(Transformation):
         graph_modified = False
         for n in graph.node:
             node_ind += 1
-            if n.op_type == "Mul":
+            if (
+                n.op_type == "Mul"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
                 consumer = model.find_consumer(n.output[0])
-                if consumer is not None and consumer.op_type == "Conv":
+                if (
+                    consumer is not None
+                    and consumer.op_type == "Conv"
+                    and not model.is_join_node(consumer)
+                ):
                     mul_weight_name = n.input[1]
                     A = model.get_initializer(mul_weight_name)
-                    assert A is not None, "Initializer for mul weights is not set."
+                    if A is None:
+                        warnings.warn("Mul param is not constant, skipping")
+                        continue
                     conv_node = consumer
                     mul_node = n
                     start_name = mul_node.input[0]
@@ -271,6 +360,215 @@ class MoveScalarMulPastConv(Transformation):
         return (model, graph_modified)
 
 
+class MoveMulPastDWConv(Transformation):
+    """Move channelwise mul operations past depthwise conv operations. We want to have muls
+    next to each other such that they can be collapsed into a single mul."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            node_ind += 1
+            if (
+                n.op_type == "Mul"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
+                consumer = model.find_consumer(n.output[0])
+                if (
+                    consumer is not None
+                    and consumer.op_type == "Conv"
+                    and not model.is_join_node(consumer)
+                ):
+                    mul_weight_name = n.input[1]
+                    A = model.get_initializer(mul_weight_name)
+                    if A is None:
+                        warnings.warn(
+                            """Mul weight tensor is not set. If it is a constant,
+                                please use set_initializer to set the tensor."""
+                        )
+                        continue
+                    conv_node = consumer
+                    mul_node = n
+                    start_name = mul_node.input[0]
+                    conv_in_name = conv_node.input[0]
+                    conv_in_shape = model.get_tensor_shape(conv_in_name)
+                    ifm_ch = conv_in_shape[1]
+                    group_attribute = get_by_name(consumer.attribute, "group")
+                    if group_attribute is None:
+                        continue
+                    group_attribute = group_attribute.i
+                    conv_out_name = conv_node.output[0]
+                    conv_out_shape = model.get_tensor_shape(conv_out_name)
+                    if A.shape == (1, ifm_ch, 1, 1) and ifm_ch == group_attribute:
+                        # if the mul is channelwise and conv is depthwise,
+                        # we can simply swap the order of ops
+                        # rewire mul input to be conv input
+                        conv_node.input[0] = start_name
+                        model.set_tensor_shape(start_name, conv_in_shape)
+                        model.set_tensor_datatype(start_name, DataType.FLOAT32)
+                        # use old conv input tensor as conv output
+                        conv_node.output[0] = conv_in_name
+                        model.set_tensor_shape(conv_in_name, conv_out_shape)
+                        model.set_tensor_datatype(conv_in_name, DataType.FLOAT32)
+                        # use new conv output as new mul node input
+                        mul_node.input[0] = conv_in_name
+                        # use old conv output as new mul node output
+                        mul_node.output[0] = conv_out_name
+                        model.set_tensor_datatype(conv_out_name, DataType.FLOAT32)
+                        # move mul node past conv node
+                        graph.node.remove(mul_node)
+                        graph.node.insert(node_ind, mul_node)
+                        graph_modified = True
+        model = model.transform(InferShapes())
+        return (model, graph_modified)
+
+
+class MoveLinearPastEltwiseAdd(Transformation):
+    """Move linear operations (mul, add) past elementwise add operations where possible.
+       Specifically, matches and transforms the following patterns:
+       (x*C) + (y*C) -> (x + y) * C
+       (x+A) + (y+B) -> (x + y) + (A + B)
+       where x and y are dynamic inputs, A, B, C are constant tensors (in general).
+    """
+
+    def move_node(self, graph, n, prod0, prod1, node_ind):
+        # move one of the producers past the eltwise add, remove the other one
+        lin0_in0 = prod0.input[0]
+        lin1_in0 = prod1.input[0]
+        in0 = n.input[0]
+        out = n.output[0]
+        # TODO: check shapes don't change through scalar mul or add
+        # connect the eltwise add inputs to mul inputs
+        n.input[0] = lin0_in0
+        n.input[1] = lin1_in0
+        # connect mul0 output to eltwise add output
+        prod0.output[0] = out
+        # connect the input of mul0 and output of eltwise add together
+        n.output[0] = in0
+        prod0.input[0] = in0
+        # move prod0 node past eltwise add node, and remove prod1
+        graph.node.remove(prod1)
+        graph.node.remove(prod0)
+        graph.node.insert(node_ind - 2, prod0)
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        nodes = [n for n in graph.node]
+        for n in nodes:
+            node_ind += 1
+            if n.op_type == "Add":
+                # check for tensors on both inputs (eltwise add)
+                # scalar add has an initializer on one input
+                in0 = n.input[0]
+                in1 = n.input[1]
+                if in0 is None or in1 is None:
+                    continue
+                A = model.get_initializer(in0)
+                B = model.get_initializer(in1)
+                if A is not None or B is not None:
+                    continue
+                # check producers on both inputs (Mul with equal params, or Add)
+                prod0 = model.find_producer(in0)
+                prod1 = model.find_producer(in1)
+                # also skip the case where both branches are empty and come
+                # from the same node (prod0 == prod1); another transformation
+                # should handle that
+                if prod0 is None or prod1 is None or (prod0 == prod1):
+                    continue
+                init0 = model.get_initializer(prod0.input[1])
+                init1 = model.get_initializer(prod1.input[1])
+                # if either initializer is None, skip
+                if init0 is None or init1 is None:
+                    continue
+                if prod0.op_type == "Mul" and prod1.op_type == "Mul":
+                    if np.array_equal(init0, init1):
+                        self.move_node(graph, n, prod0, prod1, node_ind)
+                        node_ind -= 1
+                        graph_modified = True
+                elif prod0.op_type == "Add" and prod1.op_type == "Add":
+                    init = init0 + init1
+                    # update initializer of prod0, which we'll move
+                    model.set_initializer(prod0.input[1], init)
+                    self.move_node(graph, n, prod0, prod1, node_ind)
+                    node_ind -= 1
+                    graph_modified = True
+                else:
+                    continue
+        model = model.transform(InferShapes())
+        return (model, graph_modified)
+
+
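A quick numpy check of the two patterns named in the docstring, purely illustrative:

```python
import numpy as np

x = np.random.randn(4).astype(np.float32)
y = np.random.randn(4).astype(np.float32)
C = np.float32(2.5)
A, B = np.float32(1.0), np.float32(-3.0)
assert np.allclose(x * C + y * C, (x + y) * C)             # (x*C)+(y*C) -> (x+y)*C
assert np.allclose((x + A) + (y + B), (x + y) + (A + B))   # (x+A)+(y+B) -> (x+y)+(A+B)
```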
+class MoveScalarLinearPastInvariants(Transformation):
+    """Move scalar linear operations (mul, add) past functions which are invariant
+       to them. Specifically, matches and transforms the following patterns:
+       f(x*C) -> f(x) * C
+       f(x+C) -> f(x) + C
+       where x is a dynamic input, C is a constant tensor.
+       Known f which obey this property are: Reshape, Flatten, Transpose,
+       GlobalAveragePool
+    """
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        nodes = [n for n in graph.node]
+        for n in nodes:
+            node_ind += 1
+            if (
+                n.op_type == "GlobalAveragePool"
+                or n.op_type == "Reshape"
+                or n.op_type == "Transpose"
+                or n.op_type == "Flatten"
+            ):
+                in0 = n.input[0]
+                if in0 is None:
+                    continue
+                # find and check producer on our input
+                prod0 = model.find_producer(in0)
+                if prod0 is None:
+                    continue
+
+                if prod0.op_type in ["Mul", "Add", "Div"]:
+                    # check if second input of producer is an initializer
+                    init0 = model.get_initializer(prod0.input[1])
+                    # if either initializer is None, skip
+                    if init0 is None:
+                        continue
+                    # if initializer is not scalar, skip
+                    if np.prod(init0.shape) != 1:
+                        continue
+                    # move prod0 from input to output,
+                    old_prod0_in = prod0.input[0]
+                    old_prod0_out = prod0.output[0]
+                    scalar_op_odt = model.get_tensor_datatype(old_prod0_out)
+                    old_n_out = n.output[0]
+                    in_shape = model.get_tensor_shape(n.input[0])
+                    out_shape = model.get_tensor_shape(n.output[0])
+                    n.input[0] = old_prod0_in
+                    n.output[0] = old_prod0_out
+                    prod0.input[0] = old_prod0_out
+                    prod0.output[0] = old_n_out
+                    model.set_tensor_shape(n.input[0], in_shape)
+                    model.set_tensor_shape(n.output[0], out_shape)
+                    model.set_tensor_shape(prod0.output[0], out_shape)
+                    model.set_tensor_datatype(prod0.output[0], scalar_op_odt)
+                    model.set_tensor_datatype(n.output[0], DataType.FLOAT32)
+                    graph.node.remove(prod0)
+                    graph.node.insert(node_ind - 1, prod0)
+                    graph_modified = True
+                else:
+                    continue
+        if graph_modified:
+            model = model.transform(InferShapes())
+            model = model.transform(InferDataTypes())
+        return (model, graph_modified)
+
+
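The invariance the docstring refers to, checked with numpy for some of the listed ops (illustrative only):

```python
import numpy as np

x = np.random.randn(1, 3, 4, 4).astype(np.float32)
C = np.float32(0.125)
# scalar mul/add commute with shape-only ops such as Transpose/Reshape/Flatten
assert np.allclose((x * C).transpose(0, 2, 3, 1), x.transpose(0, 2, 3, 1) * C)
assert np.allclose((x + C).reshape(1, -1), x.reshape(1, -1) + C)
# GlobalAveragePool is linear, so mean(x*C) = mean(x)*C and mean(x+C) = mean(x)+C
assert np.allclose((x * C).mean(axis=(2, 3)), x.mean(axis=(2, 3)) * C)
```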
 class MakeMaxPoolNHWC(Transformation):
     """Convert (MaxPool, NHWCTranpose) into (MaxPoolNHWC)."""
 
@@ -302,3 +600,368 @@ class MakeMaxPoolNHWC(Transformation):
                         graph.node.insert(node_ind - 1, consumer)
                         graph_modified = True
         return (model, graph_modified)
+
+
+class MoveOpPastFork(Transformation):
+    """Move node operations past graph forks. Used when a node before a fork
+     can be merged with nodes in the branches
+    """
+
+    def __init__(self, op_name_list):
+        super().__init__()
+        self.ops_to_move = op_name_list
+
+    def apply(self, model):
+        graph = model.graph
+        graph_modified = False
+        nodes = [n for n in graph.node]
+        node_ind = 0
+        for n in nodes:
+            node_ind += 1
+            if (
+                n.op_type in self.ops_to_move
+                and model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
+
+                # Restrict this transform to operations with constant parameters,
+                # assuming the parameter tensor is input 1
+                op_init_param = model.get_initializer(n.input[1])
+                if op_init_param is None:
+                    continue
+
+                # skip if all branches are empty and lead to the same node,
+                # i.e. the fork output feeds the same consumer multiple times
+                consumers = model.find_consumers(n.output[0])
+                all_same_consumer = True
+                for consum_node in consumers[1:]:
+                    if consumers[0] != consum_node:
+                        all_same_consumer = False
+                        break
+
+                if all_same_consumer:
+                    continue
+
+                for consumer_node in consumers[1:]:
+                    # create new node
+                    new_param_name = model.make_new_valueinfo_name()
+                    new_output_tensor_name = model.make_new_valueinfo_name()
+                    new_node = oh.make_node(
+                        n.op_type,
+                        [n.input[0], new_param_name],
+                        [new_output_tensor_name],
+                    )
+                    graph.node.insert(node_ind, new_node)
+                    node_ind += 1
+                    model.set_initializer(new_param_name, op_init_param)
+
+                    # change consumer input tensor
+                    graph.node.remove(consumer_node)
+                    for idx, consumer_input in enumerate(consumer_node.input):
+                        if consumer_input == n.output[0]:
+                            consumer_node.input[idx] = new_output_tensor_name
+                            break
+                    else:
+                        raise Exception(
+                            "Consumer should have the current node output as input"
+                        )
+
+                    graph.node.insert(node_ind, consumer_node)
+
+                graph_modified = True
+
+        model = model.transform(InferShapes())
+        return (model, graph_modified)
+
+
+class MoveAddPastFork(MoveOpPastFork):
+    def __init__(self):
+        super().__init__(["Add"])
+
+
+class MoveMulPastFork(MoveOpPastFork):
+    def __init__(self):
+        super().__init__(["Mul"])
+
+
+class MoveLinearPastFork(MoveOpPastFork):
+    def __init__(self):
+        super().__init__(["Add", "Mul"])
+
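+# Illustrative usage sketch (not part of the original change): these rewrites
+# are applied like any other FINN Transformation, e.g.
+#
+#   model = model.transform(MoveLinearPastFork())
+#
+# where model is a finn.core.modelwrapper.ModelWrapper instance.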
+
+class MoveMaxPoolPastMultiThreshold(Transformation):
+    """Move MaxPool nodes past MultiThreshold nodes on linear segments of the graph."""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        nodes = [n for n in graph.node]
+        for n in nodes:
+            node_ind += 1
+            if n.op_type == "MaxPool" and not model.is_fork_node(n):
+                consumer = model.find_consumer(n.output[0])
+                pads = get_by_name(n.attribute, "pads")
+                has_padding = False
+                if pads is not None:
+                    pads = list(pads.ints)
+                    has_padding = np.prod(pads) != 0
+                if consumer is not None and consumer.op_type == "MultiThreshold":
+                    mt_out = consumer.output[0]
+                    mt_odt = model.get_tensor_datatype(mt_out)
+                    if mt_odt.signed() and has_padding:
+                        warnings.warn(
+                            "Skipping padded MaxPool + signed-output MultiThreshold"
+                        )
+                        continue
+                    # check for non-decreasing thresholds and nonnegative
+                    # scale factor in MultiThreshold
+                    # otherwise we cannot do the reordering
+                    T = model.get_initializer(consumer.input[1])
+                    T_sorted = np.sort(T, axis=1)
+                    assert (
+                        T == T_sorted
+                    ).all(), "MultiThreshold must have non-decreasing thresholds"
+                    mt_inst = getCustomOp(consumer)
+                    if mt_inst.get_nodeattr("out_scale") < 0:
+                        warnings.warn("Skipping MultiThreshold with negative out_scale")
+                        continue
+
+                    # remove old nodes
+                    graph.node.remove(n)
+                    graph.node.remove(consumer)
+
+                    # swap connections
+                    group_in = n.input[0]
+                    # new tensor because dims change
+                    group_middle = model.make_new_valueinfo_name()
+                    group_out = consumer.output[0]
+
+                    consumer.input[0] = group_in
+                    consumer.output[0] = group_middle
+
+                    n.input[0] = group_middle
+                    n.output[0] = group_out
+
+                    # insert them back in
+                    graph.node.insert(node_ind - 1, consumer)
+                    graph.node.insert(node_ind, n)
+
+                    graph_modified = True
+
+        model = model.transform(InferShapes())
+        return (model, graph_modified)
+
+
+class MoveFlattenPastTopK(Transformation):
+    """Move flatten node past a succeeding topk node, if the "axis" attribute in topk
+    is set to -1 and the data layout before the flatten is NHWC with H=W=1"""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            node_ind += 1
+            if n.op_type == "Flatten":
+                consumer = model.find_consumer(n.output[0])
+                if consumer is not None and consumer.op_type == "TopK":
+                    axis = get_by_name(consumer.attribute, "axis")
+                    if axis is None or axis.i != -1:
+                        continue
+                    start_name = n.input[0]
+                    data_layout = model.get_tensor_layout(start_name)
+                    if data_layout != DataLayout.NHWC:
+                        warnings.warn(
+                            """Transformation can't be applied. The input
+                            to flatten has to have DataLayout.NHWC"""
+                        )
+                        continue
+                    (b, h, w, c) = model.get_tensor_shape(start_name)
+                    if h != 1 or w != 1:
+                        continue
+                    # get parameter k from topk
+                    k = model.get_tensor_shape(consumer.output[1])[-1]
+
+                    # swap connections
+                    # new tensor because dims change
+                    middle_name = model.make_new_valueinfo_name()
+                    topk_indices = oh.make_tensor_value_info(
+                        middle_name, TensorProto.INT64, [b, h, w, k]
+                    )
+                    end_name = consumer.output[1]
+                    graph.value_info.append(topk_indices)
+
+                    # remove old nodes
+                    graph.node.remove(n)
+                    graph.node.remove(consumer)
+
+                    # set inputs and outputs correctly
+                    consumer.input[0] = start_name
+                    consumer.output[1] = middle_name
+                    model.set_tensor_shape(consumer.output[0], (b, h, w, k))
+
+                    n.input[0] = middle_name
+                    n.output[0] = end_name
+
+                    # insert them back in
+                    graph.node.insert(node_ind - 1, consumer)
+                    graph.node.insert(node_ind, n)
+
+                    graph_modified = True
+
+        model = model.transform(InferShapes())
+        return (model, graph_modified)
+
+
+class MoveFlattenPastAffine(Transformation):
+    """Moves a node that implements a (1, -1) reshape past a MatMul, Mul or Add node."""
+
+    def apply(self, model):
+        graph = model.graph
+        graph_modified = False
+        node_ind = 0
+        for n in graph.node:
+            node_ind += 1
+            if (
+                n.op_type == "Flatten"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
+                consumer = model.find_consumer(n.output[0])
+                if (
+                    consumer is not None
+                    and (
+                        consumer.op_type == "MatMul"
+                        or consumer.op_type == "Mul"
+                        or consumer.op_type == "Add"
+                    )
+                    and not model.is_join_node(consumer)
+                ):
+                    # move flatten past operation and rewire tensors
+                    start_name = n.input[0]
+                    # check if data layout is set to NHWC and H=W=1
+                    datalayout = model.get_tensor_layout(start_name)
+                    if datalayout == DataLayout.NHWC:
+                        (b, h, w, c) = model.get_tensor_shape(start_name)
+                        if h != 1 or w != 1:
+                            warnings.warn(
+                                """The Transformation can only be performed if
+                            H=W=1."""
+                            )
+                            continue
+                    else:
+                        warnings.warn(
+                            """The Transformation can only be performed on
+                            operations that operate on data layout NHWC."""
+                        )
+                        continue
+                    middle_name = n.output[0]
+                    end_name = consumer.output[0]
+                    op_param_name = consumer.input[1]
+                    A = model.get_initializer(op_param_name)
+                    if A is None:
+                        warnings.warn("Param is not constant, skipping")
+                        continue
+                    op_in_dt = model.get_tensor_datatype(consumer.input[0])
+                    op_out_dt = model.get_tensor_datatype(consumer.output[0])
+                    start_shape = model.get_tensor_shape(start_name)
+                    dummy_in = np.random.uniform(low=0, high=1, size=(start_shape))
+
+                    if consumer.op_type == "MatMul":
+                        dummy_out = np.matmul(dummy_in, A)
+                    elif consumer.op_type == "Mul":
+                        dummy_out = dummy_in * A
+                    elif consumer.op_type == "Add":
+                        dummy_out = dummy_in + A
+
+                    new_op = oh.make_node(
+                        consumer.op_type,
+                        [start_name, op_param_name],
+                        [middle_name],
+                        name=consumer.name,
+                    )
+                    new_flatten = oh.make_node("Flatten", [middle_name], [end_name])
+                    graph.node.insert(node_ind, new_op)
+                    graph.node.insert(node_ind + 1, new_flatten)
+                    model.set_tensor_shape(middle_name, dummy_out.shape)
+                    # a Flatten node doesn't change the datatype, so only the
+                    # datatype of the op node needs to be propagated
+                    model.set_tensor_datatype(start_name, op_in_dt)
+                    model.set_tensor_datatype(middle_name, op_out_dt)
+                    model.set_tensor_datatype(end_name, op_out_dt)
+                    # set datalayout
+                    model.set_tensor_layout(start_name, DataLayout.NHWC)
+                    model.set_tensor_layout(middle_name, DataLayout.NHWC)
+                    # remove old nodes
+                    graph.node.remove(n)
+                    graph.node.remove(consumer)
+                    graph_modified = True
+
+        model = model.transform(InferShapes())
+        model = model.transform(InferDataTypes())
+        model = model.transform(InferDataLayouts())
+        return (model, graph_modified)
+
+
+class MoveTransposePastScalarMul(Transformation):
+    """Moves a Transpose node past a scalar Mul node"""
+
+    def apply(self, model):
+        graph = model.graph
+        node_ind = 0
+        graph_modified = False
+        for n in graph.node:
+            node_ind += 1
+            if (
+                n.op_type == "Transpose"
+                and not model.is_fork_node(n)
+                and not model.is_join_node(n)
+            ):
+                consumer = model.find_consumer(n.output[0])
+                if (
+                    consumer is not None
+                    and consumer.op_type == "Mul"
+                    and not model.is_join_node(consumer)
+                ):
+                    mul_weight_name = consumer.input[1]
+                    A = model.get_initializer(mul_weight_name)
+                    if A is None:
+                        warnings.warn("Mul param is not constant, skipping")
+                        continue
+                    transp_node = n
+                    mul_node = consumer
+                    start_name = transp_node.input[0]
+                    middle_name = transp_node.output[0]
+                    end_name = mul_node.output[0]
+                    transp_in_shape = model.get_tensor_shape(start_name)
+                    transp_out_shape = model.get_tensor_shape(middle_name)
+                    transp_in_layout = model.get_tensor_layout(start_name)
+                    transp_out_layout = model.get_tensor_layout(middle_name)
+                    if transp_in_layout is None or transp_out_layout is None:
+                        warnings.warn(
+                            """Datalayout is not set for tensors.
+                            Transformation can't be applied."""
+                        )
+                        continue
+                    if all(x == 1 for x in A.shape):
+                        # if the mul is scalar, we can simply swap the order of ops
+                        # rewire transpose input to be mul input
+                        mul_node.input[0] = start_name
+                        model.set_tensor_shape(start_name, transp_in_shape)
+                        model.set_tensor_layout(start_name, transp_in_layout)
+                        mul_node.output[0] = middle_name
+                        model.set_tensor_shape(middle_name, transp_in_shape)
+                        model.set_tensor_layout(middle_name, transp_in_layout)
+                        transp_node.input[0] = middle_name
+                        transp_node.output[0] = end_name
+                        model.set_tensor_shape(end_name, transp_out_shape)
+                        model.set_tensor_layout(end_name, transp_out_layout)
+                        graph.node.remove(transp_node)
+                        graph.node.insert(node_ind, transp_node)
+                        graph_modified = True
+
+        if graph_modified is True:
+            model = model.transform(InferDataLayouts())
+            model = model.transform(InferShapes())
+        return (model, graph_modified)
diff --git a/src/finn/transformation/streamline/round_thresholds.py b/src/finn/transformation/streamline/round_thresholds.py
index c33281d85449c173a4631297fd1d67ac0aed8c81..8626ef40619b067c6672c9017ddcb747998c3f2c 100644
--- a/src/finn/transformation/streamline/round_thresholds.py
+++ b/src/finn/transformation/streamline/round_thresholds.py
@@ -51,10 +51,20 @@ class RoundAndClipThresholds(Transformation):
                     model.set_tensor_datatype(n.input[1], idtype)
                     graph_modified = True
                 if idtype.is_integer() and not idtype.signed() and (Tnew < 0).any():
-                    # clip any negative thresholds
+                    # clip any negative thresholds if input is unsigned
                     Tnew = np.clip(Tnew, 0, None)
                     model.set_initializer(n.input[1], Tnew)
                     # use same datatype as inputs for thresholds
                     model.set_tensor_datatype(n.input[1], idtype)
                     graph_modified = True
+                if idtype.is_integer() and (
+                    (Tnew < (idtype.min() - 1)).any()
+                    or (Tnew > (idtype.max() + 1)).any()
+                ):
+                    # clip any large thresholds to input range + 1
+                    Tnew = np.clip(Tnew, idtype.min() - 1, idtype.max() + 1)
+                    model.set_initializer(n.input[1], Tnew)
+                    # use same datatype as inputs for thresholds
+                    model.set_tensor_datatype(n.input[1], idtype)
+                    graph_modified = True
         return (model, graph_modified)
diff --git a/src/finn/util/basic.py b/src/finn/util/basic.py
index bc413bf665e96be1d58a5de13b0744fd6a80f855..cc759bebb1b856a84e25978d442e460332092d23 100644
--- a/src/finn/util/basic.py
+++ b/src/finn/util/basic.py
@@ -31,6 +31,7 @@ import random
 import string
 import subprocess
 import tempfile
+import warnings
 
 import numpy as np
 
@@ -41,14 +42,42 @@ pynq_part_map = dict()
 pynq_part_map["Ultra96"] = "xczu3eg-sbva484-1-e"
 pynq_part_map["Pynq-Z1"] = "xc7z020clg400-1"
 pynq_part_map["Pynq-Z2"] = "xc7z020clg400-1"
+pynq_part_map["ZCU102"] = "xczu9eg-ffvb1156-2-e"
 pynq_part_map["ZCU104"] = "xczu7ev-ffvc1156-2-e"
 
+# native AXI HP port width (in bits) for PYNQ boards
+pynq_native_port_width = dict()
+pynq_native_port_width["Pynq-Z1"] = 64
+pynq_native_port_width["Pynq-Z2"] = 64
+pynq_native_port_width["Ultra96"] = 128
+pynq_native_port_width["ZCU102"] = 128
+pynq_native_port_width["ZCU104"] = 128
+
+# Alveo device and platform mappings
+alveo_part_map = dict()
+alveo_part_map["U50"] = "xcu50-fsvh2104-2L-e"
+alveo_part_map["U200"] = "xcu200-fsgd2104-2-e"
+alveo_part_map["U250"] = "xcu250-figd2104-2L-e"
+alveo_part_map["U280"] = "xcu280-fsvh2892-2L-e"
+
+alveo_default_platform = dict()
+alveo_default_platform["U50"] = "xilinx_u50_gen3x16_xdma_201920_3"
+alveo_default_platform["U200"] = "xilinx_u200_xdma_201830_2"
+alveo_default_platform["U250"] = "xilinx_u250_xdma_201830_2"
+alveo_default_platform["U280"] = "xilinx_u280_xdma_201920_3"
+
 
 def get_rtlsim_trace_depth():
     """Return the trace depth for rtlsim via PyVerilator. Controllable
     via the RTLSIM_TRACE_DEPTH environment variable. If the env.var. is
     undefined, the default value of 1 is returned. A trace depth of 1
     will only show top-level signals and yield smaller .vcd files.
+
+    The following depth values are of interest for whole-network stitched IP
+    rtlsim:
+    - level 1 shows top-level input/output streams
+    - level 2 shows per-layer input/output streams
+    - level 3 shows full per-layer I/O, including FIFO count signals
     """
 
     try:
@@ -57,6 +86,16 @@ def get_rtlsim_trace_depth():
         return 1
 
 
+def get_remote_vivado():
+    """Return the address of the remote Vivado synthesis server as set by the,
+    REMOTE_VIVADO environment variable, otherwise return None"""
+
+    try:
+        return os.environ["REMOTE_VIVADO"]
+    except KeyError:
+        return None
+
+
 def get_num_default_workers():
     """Return the number of workers for parallel transformations. Controllable
     via the NUM_DEFAULT_WORKERS environment variable. If the env.var. is
@@ -82,6 +121,25 @@ def get_finn_root():
         )
 
 
+def get_execution_error_thresh():
+    "Return the max error that is allowed for rounding in FINN execution."
+    try:
+        return float(os.environ["ERROR_THRESH"])
+    except KeyError:
+        return 1e-2
+
+
+def get_sanitize_quant_tensors():
+    """Return whether tensors with quantization annotations should be sanitized.
+    Enabled by default, disabling will yield faster ONNX execution but may give
+    incorrect results. Use with caution."""
+    try:
+        return int(os.environ["SANITIZE_QUANT_TENSORS"])
+    except KeyError:
+        # enabled by default
+        return 1
+
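+# Illustrative examples (shell invocations are assumptions, not part of the
+# original change):
+#   ERROR_THRESH=1e-3 python my_finn_script.py         # tighten rounding check
+#   SANITIZE_QUANT_TENSORS=0 python my_finn_script.py  # disable sanitization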
+
 def make_build_dir(prefix=""):
     """Creates a temporary folder with given prefix to be used as a build dir.
     Use this function instead of tempfile.mkdtemp to ensure any generated files
@@ -98,13 +156,19 @@ def make_build_dir(prefix=""):
 
 
 def get_by_name(container, name, name_field="name"):
-    """Return item from container by .name field if it exists, None otherwise"""
+    """Return item from container by .name field if it exists, None otherwise.
+    Will throw an Exception if multiple items are found, since this violates the
+    ONNX standard."""
     names = [getattr(x, name_field) for x in container]
-    try:
-        ind = names.index(name)
-        return container[ind]
-    except ValueError:
+
+    inds = [i for i, e in enumerate(names) if e == name]
+    if len(inds) > 1:
+        raise Exception("Found multiple get_by_name matches, undefined behavior")
+    elif len(inds) == 0:
         return None
+    else:
+        ind = inds[0]
+        return container[ind]
 
 
 def remove_by_name(container, name, name_field="name"):
@@ -201,6 +265,33 @@ def pad_tensor_to_multiple_of(ndarray, pad_to_dims, val=0, distr_pad=False):
     return ret
 
 
+def calculate_matvec_accumulator_range(matrix, vec_dt):
+    """Calculate the minimum and maximum possible result (accumulator) values
+    for a matrix-vector product x * A, given matrix A of dims (MW, MH) and
+    vector x of dims (1, MW) with datatype vec_dt. Returns (acc_min, acc_max).
+    """
+    min_weight = matrix.min()
+    max_weight = matrix.max()
+    perceptive_field_elems = matrix.shape[0]
+    min_input = vec_dt.min()
+    max_input = vec_dt.max()
+    # calculate minimum and maximum values of accumulator
+    # assume inputs span the whole range of the input datatype
+    acc_min = perceptive_field_elems * min(
+        min_weight * max_input,
+        min_weight * min_input,
+        max_weight * max_input,
+        max_weight * min_input,
+    )
+    acc_max = perceptive_field_elems * max(
+        min_weight * max_input,
+        min_weight * min_input,
+        max_weight * max_input,
+        max_weight * min_input,
+    )
+    return (acc_min, acc_max)
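+
+# Worked example (illustrative, not part of the original change): for a 2x2
+# weight matrix with min_weight=-1, max_weight=1 and an INT2 input vector
+# (value range [-2, 1]), the extreme per-element products are -2 and 2, so
+# acc_min = 2 * (-2) = -4 and acc_max = 2 * 2 = 4.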
+
+
 def gen_finn_dt_tensor(finn_dt, tensor_shape):
     """Generates random tensor in given shape and with given FINN DataType."""
     if type(tensor_shape) == list:
@@ -241,6 +332,69 @@ def calculate_signed_dot_prod_range(dt_a, dt_b, len):
     return (min_prod, max_prod)
 
 
+def sanitize_quant_values(model, node_tensors, execution_context, check_values=False):
+    """ Sanitize given list of tensors in execution_context by rounding values
+    that are supposed to be integers (as indicated by their quantization
+    annotation). Will raise an assertion if the amount of rounding is too large.
+    Returns the sanitized execution context.
+
+    If check_values is specified, an extra DataType.allowed() check will be
+    performed on any rounded tensors.
+
+    Background:
+    FINN uses floating point tensors as a carrier data type to represent
+    integers. Floating point arithmetic can introduce rounding errors, e.g.
+    (int_num * float_scale) / float_scale is not always equal to int_num.
+    We use this function to ensure that the values that are supposed to be
+    integers are indeed integers.
+    """
+
+    for tensor in node_tensors:
+        dtype = model.get_tensor_datatype(tensor)
+        # floats don't need sanitization, skip to next
+        # (skipping them also keeps runtime overhead low)
+        if dtype == DataType.FLOAT32:
+            continue
+        current_values = execution_context[tensor]
+        updated_values = current_values
+        has_to_be_rounded = False
+        # TODO: vectorize with numpy
+        for value in np.nditer(current_values):
+            if not dtype.allowed(value):
+                has_to_be_rounded = True
+                break
+        if has_to_be_rounded:
+            updated_values = np.round(current_values)
+            warnings.warn(
+                "The values of tensor {} can't be represented "
+                "with the set FINN datatype ({}), they will be rounded to match the "
+                "FINN datatype.".format(tensor, dtype)
+            )
+        # check if rounded values are not too far from original values
+        max_error = max(np.abs(current_values - updated_values).flatten())
+        if max_error <= get_execution_error_thresh():
+            if check_values is True:
+                # check again if values can now be represented with set finn datatype
+                # TODO: vectorize with numpy
+                for value in np.nditer(updated_values):
+                    if not dtype.allowed(value):
+                        raise Exception(
+                            """Values can't be represented with set
+                                finn datatype ({}) for input {}""".format(
+                                dtype, tensor
+                            )
+                        )
+            execution_context[tensor] = updated_values
+        else:
+            raise Exception(
+                """Rounding error is too high to match set FINN
+            datatype ({}) for input {}""".format(
+                    dtype, tensor
+                )
+            )
+    return execution_context
+
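+# Illustrative usage sketch (surrounding execution code and names are
+# assumptions): the execution context can be sanitized around each node, e.g.
+#   ctx = sanitize_quant_values(model, node.input, ctx)
+#   # ... execute the node ...
+#   ctx = sanitize_quant_values(model, node.output, ctx, check_values=True)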
+
 class CppBuilder:
     """Builds the g++ compiler command to produces the executable of the c++ code
     in code_gen_dir which is passed to the function build() of this class."""
diff --git a/src/finn/util/create.py b/src/finn/util/create.py
new file mode 100644
index 0000000000000000000000000000000000000000..853cdd0d44a05426b34bf1db3caa58d9289b2e9e
--- /dev/null
+++ b/src/finn/util/create.py
@@ -0,0 +1,178 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import numpy as np
+from finn.core.modelwrapper import ModelWrapper
+from onnx import TensorProto, helper
+from finn.core.datatype import DataType
+from finn.util.basic import calculate_signed_dot_prod_range, gen_finn_dt_tensor
+
+
+def hls_random_mlp_maker(layer_spec):
+    """Create an MLP of given specification using HLSCustomOp instances.
+    Generate random weights/thresholds of appropriate size."""
+    ret = []
+    for l in layer_spec:
+        idt = l["idt"]
+        wdt = l["wdt"]
+        mw = l["mw"]
+        mh = l["mh"]
+        act = l["act"]
+        l["W"] = gen_finn_dt_tensor(wdt, (mw, mh))
+        if act is None:
+            # no activation, produce accumulators
+            T = None
+            tdt = None
+            if wdt == DataType.BIPOLAR and idt == DataType.BIPOLAR:
+                odt = DataType.UINT32
+            else:
+                odt = DataType.INT32
+        else:
+            odt = act
+            (min, max) = calculate_signed_dot_prod_range(idt, wdt, mw)
+            n_steps = act.get_num_possible_values() - 1
+            T = np.random.randint(min, max - 1, (mh, n_steps)).astype(np.float32)
+            # provide non-decreasing thresholds
+            T = np.sort(T, axis=1)
+            # generate thresholds for activation
+            if wdt == DataType.BIPOLAR and idt == DataType.BIPOLAR:
+                tdt = DataType.UINT32
+                # bias thresholds to be positive
+                T = np.ceil((T + mw) / 2)
+                assert (T >= 0).all()
+            else:
+                tdt = DataType.INT32
+        l["T"] = T
+        l["tdt"] = tdt
+        l["odt"] = odt
+        ret.append(l)
+
+    return hls_mlp_maker(ret)
+
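+# Illustrative layer_spec sketch (keys taken from the code above, values are
+# assumptions):
+#   layer_spec = [
+#       {"idt": DataType.INT2, "wdt": DataType.INT2, "act": DataType.INT2,
+#        "mw": 8, "mh": 8, "pe": 4, "simd": 4}
+#   ]
+#   model = hls_random_mlp_maker(layer_spec)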
+
+def hls_mlp_maker(layer_spec):
+    """Create an MLP of given specification using HLSCustomOp instances."""
+
+    current_in_name = ""
+    current_out_name = ""
+    i = 0
+
+    graph = helper.make_graph(nodes=[], name="mlp", inputs=[], outputs=[])
+
+    model = helper.make_model(graph, producer_name="finn")
+    model = ModelWrapper(model)
+
+    for l in layer_spec:
+        current_W_name = "W_%d" % i
+        current_T_name = "T_%d" % i
+        current_in_name = "act_%d" % i
+        current_out_name = "act_%d" % (i + 1)
+
+        W = l["W"]
+        (mw, mh) = W.shape
+        T = l["T"]
+        pe = l["pe"]
+        simd = l["simd"]
+        wdt = l["wdt"]
+        idt = l["idt"]
+        tdt = l["tdt"]
+        odt = l["odt"]
+
+        if i == 0:
+            global_in = helper.make_tensor_value_info(
+                current_in_name, TensorProto.FLOAT, [1, mw]
+            )
+            model.graph.input.append(global_in)
+
+        if i == len(layer_spec) - 1:
+            global_out = helper.make_tensor_value_info(
+                current_out_name, TensorProto.FLOAT, [1, mh]
+            )
+            model.graph.output.append(global_out)
+
+        # there are two ways to implement bipolar weights and inputs for
+        # StreamingFC:
+        # - specify their datatypes as such
+        # - specify their datatypes as BINARY and use binaryXnorMode
+        if wdt == DataType.BIPOLAR and idt == DataType.BIPOLAR:
+            # we'll internally convert weights/inputs to binary and specify the
+            # datatypes as such, and also set the binaryXnorMode attribute to 1
+            export_wdt = DataType.BINARY
+            export_idt = DataType.BINARY
+            binary_xnor_mode = 1
+        else:
+            export_wdt = wdt
+            export_idt = idt
+            binary_xnor_mode = 0
+
+        if T is not None:
+            no_act = 0
+            node_inp_list = [current_in_name, current_W_name, current_T_name]
+            if odt == DataType.BIPOLAR:
+                actval = 0
+            else:
+                actval = odt.min()
+        else:
+            # no thresholds
+            node_inp_list = [current_in_name, current_W_name]
+            actval = 0
+            no_act = 1
+        FCLayer_node = helper.make_node(
+            "StreamingFCLayer_Batch",
+            node_inp_list,
+            [current_out_name],
+            domain="finn",
+            backend="fpgadataflow",
+            resType="ap_resource_lut()",
+            MW=mw,
+            MH=mh,
+            SIMD=simd,
+            PE=pe,
+            inputDataType=export_idt.name,
+            weightDataType=export_wdt.name,
+            outputDataType=odt.name,
+            ActVal=actval,
+            binaryXnorMode=binary_xnor_mode,
+            noActivation=no_act,
+        )
+
+        model.graph.node.append(FCLayer_node)
+        model.set_tensor_datatype(current_in_name, idt)
+        model.set_tensor_datatype(current_out_name, odt)
+        model.set_tensor_datatype(current_W_name, wdt)
+        if binary_xnor_mode:
+            # convert bipolar to binary
+            model.set_initializer(current_W_name, (W + 1) / 2)
+        else:
+            model.set_initializer(current_W_name, W)
+        if T is not None:
+            model.set_tensor_datatype(current_T_name, tdt)
+            model.set_initializer(current_T_name, T)
+        i += 1
+
+    return model
diff --git a/src/finn/util/fpgadataflow.py b/src/finn/util/fpgadataflow.py
index 9a2708439c0fed1e25c0d955af21cd2e9e705446..3fe747a84985b2702ffb1e5855d9071362efebda 100644
--- a/src/finn/util/fpgadataflow.py
+++ b/src/finn/util/fpgadataflow.py
@@ -83,14 +83,28 @@ def pyverilate_stitched_ip(model):
     def file_to_dir(x):
         return os.path.dirname(os.path.realpath(x))
 
+    def file_to_basename(x):
+        return os.path.basename(os.path.realpath(x))
+
     all_verilog_dirs = list(map(file_to_dir, all_verilog_srcs))
-    top_verilog = model.get_metadata_prop("wrapper_filename")
+    all_verilog_files = list(
+        set(
+            filter(
+                lambda x: x.endswith(".v"),
+                list(map(file_to_basename, all_verilog_srcs)),
+            )
+        )
+    )
+    top_module_name = model.get_metadata_prop("wrapper_filename")
+    # use splitext (not str.strip) so only the ".v" extension is removed
+    top_module_name = os.path.splitext(file_to_basename(top_module_name))[0]
     build_dir = make_build_dir("pyverilator_ipstitched_")
     sim = PyVerilator.build(
-        top_verilog,
+        all_verilog_files,
         verilog_path=all_verilog_dirs,
         build_dir=build_dir,
         trace_depth=get_rtlsim_trace_depth(),
+        top_module_name=top_module_name,
+        auto_eval=False,
     )
     return sim
 
@@ -114,3 +128,91 @@ def is_fpgadataflow_node(node):
                     is_node = True
 
     return is_node
+
+
+def rtlsim_multi_io(sim, io_dict, num_out_values, trace_file=""):
+    """Runs the pyverilator simulation by passing the input values to the simulation,
+    toggle the clock and observing the execution time. Function contains also an
+    observation loop that can abort the simulation if no output value is produced
+    after a set number of cycles. Can handle multiple i/o streams. See function
+    implementation for details on how the top-level signals should be named.
+
+    sim: the PyVerilator object for simulation
+    io_dict: a dict of dicts in the following format:
+            {"inputs" : {"in0" : <input_data>, "in1" : <input_data>},
+             "outputs" : {"out0" : [], "out1" : []} }
+            <input_data> is a list of Python arbitrary-precision ints indicating
+            what data to push into the simulation, and the output lists are
+            similarly filled when the simulation is complete
+    num_out_values: total number of output values to be read from the
+                    simulation before the simulation finishes and returns.
+
+    returns: number of clock cycles elapsed for completion
+
+    """
+
+    if trace_file != "":
+        sim.start_vcd_trace(trace_file)
+
+    for outp in io_dict["outputs"]:
+        sim.io[outp + "_V_V_TREADY"] = 1
+
+    # observe if output is completely calculated
+    # total_cycle_count will contain the number of cycles the calculation ran
+    output_done = False
+    total_cycle_count = 0
+    output_count = 0
+    old_output_count = 0
+
+    # avoid infinite looping of simulation by aborting when there is no change
+    # in output values for a set number of cycles (the liveness threshold)
+    no_change_count = 0
+    liveness_threshold = pyverilate_get_liveness_threshold_cycles()
+
+    while not (output_done):
+        for inp in io_dict["inputs"]:
+            inputs = io_dict["inputs"][inp]
+            sim.io[inp + "_V_V_TVALID"] = 1 if len(inputs) > 0 else 0
+            sim.io[inp + "_V_V_TDATA"] = inputs[0] if len(inputs) > 0 else 0
+            if sim.io[inp + "_V_V_TREADY"] == 1 and sim.io[inp + "_V_V_TVALID"] == 1:
+                inputs = inputs[1:]
+            io_dict["inputs"][inp] = inputs
+
+        for outp in io_dict["outputs"]:
+            outputs = io_dict["outputs"][outp]
+            if sim.io[outp + "_V_V_TVALID"] == 1 and sim.io[outp + "_V_V_TREADY"] == 1:
+                outputs = outputs + [sim.io[outp + "_V_V_TDATA"]]
+                output_count += 1
+            io_dict["outputs"][outp] = outputs
+
+        sim.io.ap_clk = 1
+        sim.io.ap_clk = 0
+
+        total_cycle_count = total_cycle_count + 1
+
+        if output_count == old_output_count:
+            no_change_count = no_change_count + 1
+        else:
+            no_change_count = 0
+            old_output_count = output_count
+
+        # check if all expected output words received
+        if output_count == num_out_values:
+            output_done = True
+
+        # end sim on timeout
+        if no_change_count == liveness_threshold:
+            if trace_file != "":
+                sim.flush_vcd_trace()
+                sim.stop_vcd_trace()
+            raise Exception(
+                "Error in simulation! Takes too long to produce output. "
+                "Consider setting the LIVENESS_THRESHOLD env.var. to a "
+                "larger value."
+            )
+
+    if trace_file != "":
+        sim.flush_vcd_trace()
+        sim.stop_vcd_trace()
+
+    return total_cycle_count
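+
+
+# Illustrative call sketch (stream names must follow the <name>_V_V_T* signal
+# naming convention assumed above; values are placeholders):
+#   io_dict = {"inputs": {"in0": [0x0, 0x1]}, "outputs": {"out0": []}}
+#   cycles = rtlsim_multi_io(sim, io_dict, num_out_values=2)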
diff --git a/src/finn/util/gdrive.py b/src/finn/util/gdrive.py
new file mode 100644
index 0000000000000000000000000000000000000000..c2d9b89e354e42849a82b563fe391b9f6e603f4e
--- /dev/null
+++ b/src/finn/util/gdrive.py
@@ -0,0 +1,60 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import gspread
+import os
+import warnings
+from datetime import datetime
+
+
+def upload_to_end2end_dashboard(data_dict):
+    gdrive_key = "/workspace/finn/gdrive-key/service_account.json"
+    if not os.path.isfile(gdrive_key):
+        warnings.warn("Google Drive key not found, skipping dashboard upload")
+        return
+    gc = gspread.service_account(filename=gdrive_key)
+    spreadsheet = gc.open("finn-end2end-dashboard")
+    worksheet = spreadsheet.get_worksheet(0)
+    keys = list(data_dict.keys())
+    vals = list(data_dict.values())
+    # check against existing header
+    existing_keys = worksheet.row_values(1)
+    if existing_keys != keys:
+        # create new worksheet
+        dtstr = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        worksheet = spreadsheet.add_worksheet(
+            title="Dashboard " + dtstr, rows=10, cols=len(keys), index=0
+        )
+        # create header row with keys
+        worksheet.update("A1:1", [keys])
+        # freeze and make header bold
+        worksheet.freeze(rows=1)
+        worksheet.format("A1:1", {"textFormat": {"bold": True}})
+    # insert values into new row
+    worksheet.insert_row([], index=2)
+    worksheet.update("A2:2", [vals])
diff --git a/src/finn/util/onnx.py b/src/finn/util/onnx.py
index b9932111d86d7206b23e1d0e49a6aa8451f8ba24..4d7cdd126ededac887639a932c2021ef5f081c02 100644
--- a/src/finn/util/onnx.py
+++ b/src/finn/util/onnx.py
@@ -28,6 +28,7 @@
 
 import numpy as np
 import onnx
+import finn.core.data_layout as DataLayout
 
 
 def valueinfo_to_tensor(vi):
@@ -37,3 +38,38 @@ def valueinfo_to_tensor(vi):
     return np.zeros(
         dims, dtype=onnx.mapping.TENSOR_TYPE_TO_NP_TYPE[vi.type.tensor_type.elem_type]
     )
+
+
+def nchw_to_nhwc(t, model, idx, reverse=False):
+    """Converts between NCHW <-> NHWC layouts for tensor t by inserting a transpose. 
+    If reverse=False, t is assumed NCHW and we insert transpose to convert NCHW -> NHWC
+    If reverse=True, t is assumed NHWC and we insert transpose to convert NHWC -> NCHW.
+    """
+    graph = model.graph
+    # create new NHWC tensor
+    t_shape = model.get_tensor_shape(t)
+    bs = t_shape[0]
+    ch = t_shape[1]
+    height = t_shape[2]
+    width = t_shape[3]
+    t_trans = onnx.helper.make_tensor_value_info(
+        model.make_new_valueinfo_name(),
+        onnx.TensorProto.FLOAT,
+        (bs, height, width, ch),  # NHWC
+    )
+    graph.value_info.append(t_trans)
+    dt = model.get_tensor_datatype(t)
+    t_trans = t_trans.name
+    model.set_tensor_datatype(t_trans, dt)
+    model.set_tensor_layout(t_trans, DataLayout.NHWC)
+    # NCHW <-> NHWC transpose
+    if reverse:
+        t_trans_node = onnx.helper.make_node(
+            "Transpose", [t_trans], [t], perm=[0, 3, 1, 2]
+        )
+    else:
+        t_trans_node = onnx.helper.make_node(
+            "Transpose", [t], [t_trans], perm=[0, 2, 3, 1]
+        )
+    graph.node.insert(idx, t_trans_node)
+    return t_trans
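+
+
+# Illustrative usage sketch (tensor name and node index are assumptions):
+#   # give NCHW tensor "act_0" an NHWC view by inserting a Transpose at index 0
+#   act_0_nhwc = nchw_to_nhwc("act_0", model, 0)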
diff --git a/src/finn/util/pytorch.py b/src/finn/util/pytorch.py
new file mode 100644
index 0000000000000000000000000000000000000000..f174c24601578cf827cb0da770f29889344e62b8
--- /dev/null
+++ b/src/finn/util/pytorch.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import torch
+
+from torch.nn import Module, Sequential
+
+
+class Normalize(Module):
+    def __init__(self, mean, std, channels):
+        super(Normalize, self).__init__()
+
+        self.mean = mean
+        self.std = std
+        self.channels = channels
+
+    def forward(self, x):
+        x = x - torch.tensor(self.mean, device=x.device).reshape(1, self.channels, 1, 1)
+        x = x / self.std
+        return x
+
+
+class ToTensor(Module):
+    def __init__(self):
+        super(ToTensor, self).__init__()
+
+    def forward(self, x):
+        x = x / 255
+        return x
+
+
+class NormalizePreProc(Module):
+    def __init__(self, mean, std, channels):
+        super(NormalizePreProc, self).__init__()
+        self.features = Sequential()
+        scaling = ToTensor()
+        self.features.add_module("scaling", scaling)
+        normalize = Normalize(mean, std, channels)
+        self.features.add_module("normalize", normalize)
+
+    def forward(self, x):
+        return self.features(x)
diff --git a/src/finn/util/test.py b/src/finn/util/test.py
index 34edc3cacdecc461d1254c35c026c56ff8813549..32c6a0a3a3bb19b95590181dbe447e82cf9966a2 100644
--- a/src/finn/util/test.py
+++ b/src/finn/util/test.py
@@ -26,49 +26,154 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-import torch
-from models.CNV import CNV
-from models.LFC import LFC
-from models.SFC import SFC
-from models.TFC import TFC
+import onnx
+import onnx.numpy_helper as nph
+import pkg_resources as pk
+from pkgutil import get_data
+from brevitas_examples import bnn_pynq
+import numpy as np
+import pytest
+import warnings
+from finn.core.modelwrapper import ModelWrapper
+import os
+from finn.util.basic import pynq_part_map, alveo_part_map, alveo_default_platform
+from finn.transformation.fpgadataflow.make_zynq_proj import ZynqBuild
+from finn.transformation.fpgadataflow.vitis_build import VitisBuild, VitisOptStrategy
+from finn.custom_op.registry import getCustomOp
+from finn.core.onnx_exec import execute_onnx
 
+# map of (wbits,abits) -> model
+example_map = {
+    ("CNV", 1, 1): bnn_pynq.cnv_1w1a,
+    ("CNV", 1, 2): bnn_pynq.cnv_1w2a,
+    ("CNV", 2, 2): bnn_pynq.cnv_2w2a,
+    ("LFC", 1, 1): bnn_pynq.lfc_1w1a,
+    ("LFC", 1, 2): bnn_pynq.lfc_1w2a,
+    ("SFC", 1, 1): bnn_pynq.sfc_1w1a,
+    ("SFC", 1, 2): bnn_pynq.sfc_1w2a,
+    ("SFC", 2, 2): bnn_pynq.sfc_2w2a,
+    ("TFC", 1, 1): bnn_pynq.tfc_1w1a,
+    ("TFC", 1, 2): bnn_pynq.tfc_1w2a,
+    ("TFC", 2, 2): bnn_pynq.tfc_2w2a,
+}
 
-def get_trained_checkpoint(netname, wbits, abits):
-    """Returns the weights and activations from the FINN Brevitas test networks
-    for given netname and the number of bits for weights and activations"""
-    # TODO get from config instead, hardcoded to Docker path for now
-    nname = "%s_%dW%dA" % (netname, wbits, abits)
-    root = "/workspace/brevitas_cnv_lfc/pretrained_models/%s/checkpoints/best.tar"
-    return root % nname
 
-
-def get_test_model_def_fxn(netname):
-    """Returns the PyTorch model instantation function related to netname."""
-    model_def_map = {"LFC": LFC, "SFC": SFC, "TFC": TFC, "CNV": CNV}
-    return model_def_map[netname]
+def get_test_model(netname, wbits, abits, pretrained):
+    """Returns the model specified by input arguments from the Brevitas BNN-PYNQ
+    test networks. Pretrained weights loaded if pretrained is True."""
+    model_cfg = (netname, wbits, abits)
+    model_def_fxn = example_map[model_cfg]
+    fc = model_def_fxn(pretrained)
+    return fc.eval()
 
 
 def get_test_model_trained(netname, wbits, abits):
-    """Returns the pretrained model specified by input arguments loaded with weights
-    and activations from the FINN Brevitas test networks."""
-    model_def_fxn = get_test_model_def_fxn(netname)
-    checkpoint_loc = get_trained_checkpoint(netname, wbits, abits)
-    if netname == "CNV":
-        ibits = 8
-    else:
-        ibits = abits
-    fc = model_def_fxn(weight_bit_width=wbits, act_bit_width=abits, in_bit_width=ibits)
-    checkpoint = torch.load(checkpoint_loc, map_location="cpu")
-    fc.load_state_dict(checkpoint["state_dict"])
-    return fc.eval()
+    "get_test_model with pretrained=True"
+    return get_test_model(netname, wbits, abits, pretrained=True)
 
 
 def get_test_model_untrained(netname, wbits, abits):
-    """Returns untrained model specified by input arguments."""
-    model_def_fxn = get_test_model_def_fxn(netname)
-    if netname == "CNV":
-        ibits = 8
+    "get_test_model with pretrained=False"
+    return get_test_model(netname, wbits, abits, pretrained=False)
+
+
+def get_topk(vec, k):
+    "Return indices of the top-k values in given array vec (treated as 1D)."
+    return np.flip(vec.flatten().argsort())[:k]
+
+
+def soft_verify_topk(invec, idxvec, k):
+    """Check that the topK indices provided actually point to the topK largest
+    values in the input vector"""
+    np_topk = np.flip(invec.flatten().argsort())[:k]
+    soft_expected = invec.flatten()[np_topk.astype(np.int).flatten()]
+    soft_produced = invec.flatten()[idxvec.astype(np.int).flatten()]
+    return (soft_expected == soft_produced).all()
+
+
+def load_test_checkpoint_or_skip(filename):
+    "Try to load given .onnx and return ModelWrapper, else skip current test."
+    if os.path.isfile(filename):
+        model = ModelWrapper(filename)
+        return model
     else:
-        ibits = abits
-    fc = model_def_fxn(weight_bit_width=wbits, act_bit_width=abits, in_bit_width=ibits)
-    return fc.eval()
+        warnings.warn(filename + " not found from previous test step, skipping")
+        pytest.skip(filename + " not found from previous test step, skipping")
+
+
+def get_build_env(kind, target_clk_ns):
+    """Get board-related build environment for testing.
+    - kind = either zynq or alveo.
+    """
+    ret = {}
+    if kind == "zynq":
+        ret["board"] = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
+        ret["part"] = pynq_part_map[ret["board"]]
+        ret["ip"] = os.getenv("PYNQ_IP", "")
+        ret["username"] = os.getenv("PYNQ_USERNAME", "xilinx")
+        ret["password"] = os.getenv("PYNQ_PASSWORD", "xilinx")
+        ret["port"] = os.getenv("PYNQ_PORT", 22)
+        ret["target_dir"] = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn")
+        ret["build_fxn"] = ZynqBuild(ret["board"], target_clk_ns)
+    elif kind == "alveo":
+        ret["board"] = os.getenv("ALVEO_BOARD", default="U250")
+        ret["part"] = alveo_part_map[ret["board"]]
+        ret["platform"] = alveo_default_platform[ret["board"]]
+        ret["ip"] = os.getenv("ALVEO_IP", "")
+        ret["username"] = os.getenv("ALVEO_USERNAME", "")
+        ret["password"] = os.getenv("ALVEO_PASSWORD", "")
+        ret["port"] = os.getenv("ALVEO_PORT", 22)
+        ret["target_dir"] = os.getenv("ALVEO_TARGET_DIR", "/tmp/finn_alveo_deploy")
+        ret["build_fxn"] = VitisBuild(
+            ret["part"],
+            target_clk_ns,
+            ret["platform"],
+            strategy=VitisOptStrategy.BUILD_SPEED,
+        )
+    else:
+        raise Exception("Unknown test build environment spec")
+    return ret
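+
+# Illustrative usage sketch (clock period is an assumption):
+#   build_env = get_build_env("zynq", target_clk_ns=10)
+#   model = model.transform(build_env["build_fxn"])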
+
+
+def get_example_input(topology):
+    "Get example numpy input tensor for given topology."
+
+    if "fc" in topology:
+        raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
+        onnx_tensor = onnx.load_tensor_from_string(raw_i)
+        return nph.to_array(onnx_tensor)
+    elif topology == "cnv":
+        fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz")
+        input_tensor = np.load(fn)["arr_0"].astype(np.float32)
+        return input_tensor
+    else:
+        raise Exception("Unknown topology, can't return example input")
+
+
+def get_trained_network_and_ishape(topology, wbits, abits):
+    "Return (trained_model, shape) for given BNN-PYNQ test config."
+
+    topology_to_ishape = {
+        "tfc": (1, 1, 28, 28),
+        "cnv": (1, 3, 32, 32),
+    }
+    ishape = topology_to_ishape[topology]
+    model = get_test_model_trained(topology.upper(), wbits, abits)
+    return (model, ishape)
+
+
+def execute_parent(parent_path, child_path, input_tensor_npy, return_full_ctx=False):
+    """Execute parent model containing a single StreamingDataflowPartition by
+    replacing it with the model at child_path and return result."""
+
+    parent_model = load_test_checkpoint_or_skip(parent_path)
+    iname = parent_model.graph.input[0].name
+    oname = parent_model.graph.output[0].name
+    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
+    sdp_node = getCustomOp(sdp_node)
+    sdp_node.set_nodeattr("model", child_path)
+    ret = execute_onnx(parent_model, {iname: input_tensor_npy}, True)
+    if return_full_ctx:
+        return ret
+    else:
+        return ret[oname]
diff --git a/src/finn/util/vcd.py b/src/finn/util/vcd.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4400f7bd7e75549189f081ce255fd67c49b3746
--- /dev/null
+++ b/src/finn/util/vcd.py
@@ -0,0 +1,191 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from vcdvcd import VCDVCD
+from finn.util.basic import get_num_default_workers
+import multiprocessing as mp
+
+# string patterns to search for to find particular interfaces
+# streaming interfaces
+vname = "TVALID"
+rname = "TREADY"
+# FIFO count signals
+fifo_mod_name = "StreamingFIFO"
+fifo_cname = "count"
+
+
+def list_stream_if(vcd_file):
+    "Return a list of stream  interface names from given vcd trace."
+
+    sig_names = VCDVCD(vcd_file, print_dumps=False, only_sigs=True).get_signals()
+    stream_if_names = []
+    for cand_name in filter(lambda x: x.endswith(vname), sig_names):
+        base_name = cand_name.replace(vname, "")
+        if base_name + rname in sig_names:
+            stream_if_names.append(base_name)
+    return stream_if_names
+
+
+def list_fifo_count_signals(vcd_file):
+    "Return a list of FIFO count signal names from given vcd trace."
+
+    sig_names = VCDVCD(vcd_file, print_dumps=False, only_sigs=True).get_signals()
+    fifo_cnt_names = []
+    for cand_name in filter(lambda x: fifo_cname in x, sig_names):
+        if fifo_mod_name in cand_name:
+            fifo_cnt_names.append(cand_name)
+    return fifo_cnt_names
+
+
+def get_fifo_count_max(vcd_file, fifo_count_signal):
+    "Return the maximum value of the given FIFO count signal in vcd trace."
+
+    d = VCDVCD(vcd_file, signals=[fifo_count_signal], store_tvs=True).get_data()
+    assert len(d) != 0, "FIFO count signal not found"
+    events = list(d.values())[0]["tv"]
+    max = 0
+    for (time, val) in events:
+        current = int(val, base=2)
+        if current > max:
+            max = current
+    return max
+
+
+def _get_fifo_max(x):
+    return (x[0], get_fifo_count_max(x[1], x[0]))
+
+
+def get_all_fifo_count_max(vcd_file, fifo_count_signals=None):
+    """Return a list of max FIFO counts. If fifo_count_signals is None,
+    all FIFO count signals will be returned, otherwise treated as a list of
+    signal names to return the stats for."""
+    if fifo_count_signals is None:
+        fifo_count_signals = list_fifo_count_signals(vcd_file)
+
+    with mp.Pool(get_num_default_workers()) as p:
+        fifo_count_signals = map(lambda x: (x, vcd_file), fifo_count_signals)
+        all_stats = p.map(_get_fifo_max, fifo_count_signals)
+
+    return all_stats
+
+
+def get_stream_if_stats(vcd_file, if_base_name):
+    """Return statistics for given streaming interface in vcd trace in the
+    following dict format:
+
+    <stream_state>: (<num_samples>, <fraction_of_time>),
+
+    where <stream_state> is the combination of (V)alid/(R)eady values,
+    <num_samples> is the approximate number of rising clock edges spent in
+    <stream_state>, and <fraction_of_time> is the fraction of <num_samples>
+    relative to the total amount of time recorded by the trace.
+
+    Example:
+    {"{'V': 0, 'R': 0}": (5, 0.0006060606060606061),
+     "{'V': 1, 'R': 0}": (0, 0.0),
+     "{'V': 0, 'R': 1}": (7605, 0.9218181818181819),
+     "{'V': 1, 'R': 1}": (640, 0.07757575757575758)}
+
+    Here we can see the stream was transmitting values 7.7% of the time,
+    and 9.2% of the time there was no incoming data (valid 0, ready 1)
+    """
+    if_valid = if_base_name + vname
+    if_ready = if_base_name + rname
+    v = VCDVCD(vcd_file, signals=[if_valid], store_tvs=True)
+    endtime = v.get_endtime()
+    v = v.get_data()
+    assert len(v) != 0, "Streaming interface not found"
+    v = list(v.values())[0]["tv"]
+    v = list(map(lambda x: ("V", x[0], x[1]), v))
+    v.append(("V", endtime, "0"))
+    r = VCDVCD(vcd_file, signals=[if_ready], store_tvs=True).get_data()
+    assert len(r) != 0, "Streaming interface not found"
+    r = list(r.values())[0]["tv"]
+    r = list(map(lambda x: ("R", x[0], x[1]), r))
+    r.append(("R", endtime, "0"))
+    events = sorted(v + r, key=lambda x: x[1])
+    ret = {
+        "{'V': 0, 'R': 0}": 0,
+        "{'V': 1, 'R': 0}": 0,
+        "{'V': 0, 'R': 1}": 0,
+        "{'V': 1, 'R': 1}": 0,
+    }
+    status = {"V": 0, "R": 0}
+    last_time = 0
+    total_rising_clock_edges = 0
+    for (sig, time, val) in events:
+        # pyverilator generates 5 time units per sample
+        time = time / 5
+        # pyverilator generates 4 samples per clock period
+        n_rising_clock_edges = int((time - last_time) / 4)
+        # note that the calculation of n_rising_clock_edges is approximate
+        # doing this exactly would require a cycle-by-cycle walkthrough of the
+        # trace, which can take very long
+        ret[str(status)] += n_rising_clock_edges
+        total_rising_clock_edges += n_rising_clock_edges
+        status[sig] = int(val)
+        last_time = time
+
+    for state in ret:
+        v = ret[state]
+        ret[state] = (v, v / total_rising_clock_edges)
+
+    return ret
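+
+# Worked example of the timing conversion above (numbers are illustrative):
+# two consecutive events at VCD times 0 and 400 are 400/5 = 80 samples apart,
+# i.e. roughly 80/4 = 20 rising clock edges spent in the same (V, R) state.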
+
+
+def _get_stats(x):
+    return (x[0], get_stream_if_stats(x[1], x[0]))
+
+
+def get_all_stream_if_stats(
+    vcd_file, stream_ifs=None, sort_by="{'V': 1, 'R': 0}", num_workers=None
+):
+    """Return a list of streaming interface stats, sorted by the fraction of
+    time spent in the given sort_by state. If stream_ifs is None, stats for all
+    streaming interfaces will be returned, otherwise it is treated as a list of
+    interface names to return the stats for.
+    By default the number of parallel workers is taken from the environment
+    variable NUM_DEFAULT_WORKERS. This behavior can be changed on a per-call
+    basis by supplying the optional num_workers parameter.
+    """
+
+    if stream_ifs is None:
+        stream_ifs = list_stream_if(vcd_file)
+
+    if num_workers is None:
+        num_workers = get_num_default_workers()
+
+    with mp.Pool(num_workers) as p:
+        stream_ifs = map(lambda x: (x, vcd_file), stream_ifs)
+        all_stats = p.map(_get_stats, stream_ifs)
+
+    def sort_key(x):
+        stat = x[1]
+        (samples, percent) = stat[sort_by]
+        return percent
+
+    ret = sorted(all_stats, key=sort_key)
+    return ret
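+
+# Illustrative usage (the .vcd path is hypothetical):
+#   stats = get_all_stream_if_stats("rtlsim_trace.vcd")
+#   for if_name, if_stats in stats:
+#       print(if_name, if_stats["{'V': 1, 'R': 0}"])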
diff --git a/src/finn/util/vivado.py b/src/finn/util/vivado.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b6df3940cfeeed292345382471719c49f725de6
--- /dev/null
+++ b/src/finn/util/vivado.py
@@ -0,0 +1,147 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import subprocess
+import stat
+from finn.util.basic import get_remote_vivado
+
+
+def which(program):
+    "Python equivalent of the shell cmd 'which'."
+
+    # source:
+    # https://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
+    def is_exe(fpath):
+        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+    fpath, fname = os.path.split(program)
+    if fpath:
+        if is_exe(program):
+            return program
+    else:
+        for path in os.environ["PATH"].split(os.pathsep):
+            exe_file = os.path.join(path, program)
+            if is_exe(exe_file):
+                return exe_file
+
+    return None
+
+
+def out_of_context_synth(
+    verilog_dir,
+    top_name,
+    fpga_part="xczu3eg-sbva484-1-e",
+    clk_name="ap_clk_0",
+    clk_period_ns=5.0,
+    remote_server=get_remote_vivado(),
+):
+    "Run out-of-context Vivado synthesis, return resources and slack."
+
+    # ensure that the OHMYXILINX envvar is set
+    if "OHMYXILINX" not in os.environ:
+        raise Exception("The environment variable OHMYXILINX is not defined.")
+    # ensure that vivado is in PATH: source $VIVADO_PATH/settings64.sh
+    if which("vivado") is None:
+        raise Exception("vivado is not in PATH, ensure settings64.sh is sourced.")
+    omx_path = os.environ["OHMYXILINX"]
+    if remote_server is None:
+        script = "vivadocompile.sh"
+    else:
+        script = "vivadoprojgen.sh"
+    # arguments passed to the script: <top-level-entity> <clock-name> <fpga-part> <clock-period-ns>
+    call_omx = "zsh %s/%s %s %s %s %f" % (
+        omx_path,
+        script,
+        top_name,
+        clk_name,
+        fpga_part,
+        float(clk_period_ns),
+    )
+    call_omx = call_omx.split()
+    proc = subprocess.Popen(
+        call_omx, cwd=verilog_dir, stdout=subprocess.PIPE, env=os.environ
+    )
+    proc.communicate()
+
+    vivado_proj_folder = "%s/results_%s" % (verilog_dir, top_name)
+    res_counts_path = vivado_proj_folder + "/res.txt"
+    if remote_server is not None:
+        print("Using remote Vivado OOC synth, remote server %s" % remote_server)
+        run_synth = """
+#!/bin/bash
+which vivado;
+cd %s;
+vivado -mode tcl -source %s.tcl -tclargs %s;
+cat %s
+        """ % (
+            vivado_proj_folder,
+            top_name,
+            top_name,
+            res_counts_path,
+        )
+        with open(vivado_proj_folder + "/run.sh", "w") as f:
+            f.write(run_synth)
+        st = os.stat(vivado_proj_folder + "/run.sh")
+        os.chmod(vivado_proj_folder + "/run.sh", st.st_mode | stat.S_IEXEC)
+        # note that this assumes the same temp folder can be created on the
+        # remote server
+        # note we set target path as / due to use of -R (relative)
+        remote_server_uri = remote_server + ":/"
+        copy_files = "rsync -avzR %s %s" % (verilog_dir + "/", remote_server_uri)
+        copy_files = copy_files.split()
+        proc = subprocess.Popen(copy_files, cwd=verilog_dir, env=os.environ)
+        proc.communicate()
+        vivado_cmd = "bash -ic %s/run.sh" % vivado_proj_folder
+        run_vivado = ["ssh", "-t", remote_server, vivado_cmd]
+        proc = subprocess.Popen(run_vivado, cwd=verilog_dir, env=os.environ)
+        proc.communicate()
+        remote_server_result = remote_server + ":" + res_counts_path
+        copy_results = "rsync -avz %s %s" % (remote_server_result, res_counts_path)
+        copy_results = copy_results.split()
+        proc = subprocess.Popen(copy_results, cwd=verilog_dir, env=os.environ)
+        proc.communicate()
+
+    with open(res_counts_path, "r") as myfile:
+        res_data = myfile.read().split("\n")
+    ret = {}
+    ret["vivado_proj_folder"] = vivado_proj_folder
+    for res_line in res_data:
+        res_fields = res_line.split("=")
+        print(res_fields)
+        try:
+            ret[res_fields[0]] = float(res_fields[1])
+        except (ValueError, IndexError):
+            ret[res_fields[0]] = 0
+    if ret["WNS"] == 0:
+        ret["fmax_mhz"] = 0
+    else:
+        ret["fmax_mhz"] = 1000.0 / (clk_period_ns - ret["WNS"])
+    return ret
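+
+# Illustrative usage (paths and top-level name are placeholders; the resource
+# keys in the returned dict depend on what the oh-my-xilinx res.txt reports):
+#   res = out_of_context_synth("/tmp/oocsynth_verilog", "finn_design_wrapper")
+#   print(res["fmax_mhz"], res["vivado_proj_folder"])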
diff --git a/tests/analysis/test_topology_checks.py b/tests/analysis/test_topology_checks.py
index 41fbdb6cac8e81d6b1e3eed54a71d0e1d43c3adc..7f7f800da05e38fefa9350928ab6ddc94acbe2b6 100644
--- a/tests/analysis/test_topology_checks.py
+++ b/tests/analysis/test_topology_checks.py
@@ -26,11 +26,13 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+import os
 from pkgutil import get_data
 
 import onnx.helper as oh
 from onnx import TensorProto
-
+import brevitas.onnx as bo
+from finn.util.test import get_test_model_trained
 import finn.analysis.topology as ta
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.infer_shapes import InferShapes
@@ -88,3 +90,116 @@ def test_node_inputs_in_expected_order():
     # this model has an (unnecessary) dynamic reshape for its weight tensor
     # and so it fails the check
     assert ret["node_inputs_in_expected_order"] is False
+
+
+def test_nodes_topologically_sorted():
+    # test analysis pass (nodes_topologically_sorted) with different models
+
+    # test with data/onnx/finn-hls-model/tfc_w1_a1_after_conv_to_hls.onnx
+    raw_m = get_data(
+        "finn", "data/onnx/finn-hls-model/tfc_w1_a1_after_conv_to_hls.onnx"
+    )
+    model = ModelWrapper(raw_m)
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is True
+
+    # remove first node and add it at the end
+    graph = model.graph
+    first_node = graph.node[0]
+    graph.node.remove(first_node)
+    graph.node.append(first_node)
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is False
+
+    # test with data/onnx/mnist-conv/model.onnx
+    raw_m = get_data("finn", "data/onnx/mnist-conv/model.onnx")
+    model = ModelWrapper(raw_m)
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is True
+
+    # remove first node and add it at the end
+    graph = model.graph
+    first_node = graph.node[0]
+    graph.node.remove(first_node)
+    graph.node.append(first_node)
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is False
+
+    # test with manually created small network
+    Neg_node = oh.make_node("Neg", inputs=["in1"], outputs=["neg1"])
+    Round_node = oh.make_node("Round", inputs=["neg1"], outputs=["round1"])
+
+    Ceil_node = oh.make_node("Ceil", inputs=["neg1"], outputs=["ceil1"])
+    Add_node = oh.make_node("Add", inputs=["round1", "ceil1"], outputs=["out1"])
+
+    in1 = oh.make_tensor_value_info("in1", TensorProto.FLOAT, [4, 4])
+    out1 = oh.make_tensor_value_info("out1", TensorProto.FLOAT, [4, 4])
+
+    graph = oh.make_graph(
+        nodes=[Neg_node, Round_node, Ceil_node, Add_node],
+        name="simple_graph",
+        inputs=[in1],
+        outputs=[out1],
+        value_info=[
+            oh.make_tensor_value_info("neg1", TensorProto.FLOAT, [4, 4]),
+            oh.make_tensor_value_info("round1", TensorProto.FLOAT, [4, 4]),
+            oh.make_tensor_value_info("ceil1", TensorProto.FLOAT, [4, 4]),
+        ],
+    )
+
+    onnx_model = oh.make_model(graph, producer_name="simple-model")
+    model = ModelWrapper(onnx_model)
+
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is True
+
+    # create same graph but with "wrong" node order
+    graph = oh.make_graph(
+        nodes=[Round_node, Ceil_node, Neg_node, Add_node],
+        name="simple_graph",
+        inputs=[in1],
+        outputs=[out1],
+        value_info=[
+            oh.make_tensor_value_info("neg1", TensorProto.FLOAT, [4, 4]),
+            oh.make_tensor_value_info("round1", TensorProto.FLOAT, [4, 4]),
+            oh.make_tensor_value_info("ceil1", TensorProto.FLOAT, [4, 4]),
+        ],
+    )
+
+    onnx_model = oh.make_model(graph, producer_name="simple-model")
+    model = ModelWrapper(onnx_model)
+
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is False
+
+    # test with data/onnx/finn-hls-model/finn-hls-onnx-model.onnx
+    raw_m = get_data("finn", "data/onnx/finn-hls-model/finn-hls-onnx-model.onnx")
+    model = ModelWrapper(raw_m)
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is True
+
+    # remove first node and add it at the end
+    graph = model.graph
+    first_node = graph.node[0]
+    graph.node.remove(first_node)
+    graph.node.append(first_node)
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is False
+
+    # test with cnv_w1a1
+    build_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
+    cnv = get_test_model_trained("CNV", 1, 1)
+    bo.export_finn_onnx(
+        cnv, (1, 3, 32, 32), build_dir + "/end2end_cnv_w1a1_export.onnx"
+    )
+    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_export.onnx")
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is True
+
+    # remove first node and add it at the end
+    graph = model.graph
+    first_node = graph.node[0]
+    graph.node.remove(first_node)
+    graph.node.append(first_node)
+    ret = model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"] is False
diff --git a/tests/brevitas/test_brevitas_QConv2d.py b/tests/brevitas/test_brevitas_QConv2d.py
new file mode 100644
index 0000000000000000000000000000000000000000..198f1e7961a9e160589989b8b34b45b5fda53817
--- /dev/null
+++ b/tests/brevitas/test_brevitas_QConv2d.py
@@ -0,0 +1,76 @@
+import pytest
+import os
+import numpy as np
+import torch
+import brevitas.onnx as bo
+from brevitas.nn import QuantConv2d
+from brevitas.core.restrict_val import RestrictValueType
+from brevitas.core.quant import QuantType
+from brevitas.core.scaling import ScalingImplType
+from brevitas.core.stats import StatsOp
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.core.datatype import DataType
+import finn.core.onnx_exec as oxe
+from finn.transformation.infer_shapes import InferShapes
+from finn.util.basic import gen_finn_dt_tensor
+
+export_onnx_path = "test_brevitas_conv.onnx"
+
+
+@pytest.mark.parametrize("dw", [False, True])
+@pytest.mark.parametrize("in_channels", [32])
+def test_brevitas_QConv2d(dw, in_channels):
+    ishape = (1, 32, 111, 111)
+    if dw is True:
+        groups = in_channels
+        out_channels = in_channels
+        kernel_size = 3
+        padding = 1
+        stride = 1
+        w_shape = (32, 1, 3, 3)
+
+    else:
+        groups = 1
+        out_channels = 64
+        kernel_size = 1
+        padding = 0
+        stride = 1
+        w_shape = (64, 32, 1, 1)
+
+    b_conv = QuantConv2d(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        groups=groups,
+        kernel_size=kernel_size,
+        padding=padding,
+        stride=stride,
+        bias=False,
+        bias_quant_type=QuantType.FP,
+        compute_output_bit_width=False,
+        compute_output_scale=False,
+        weight_bit_width=4,
+        weight_quant_type=QuantType.INT,
+        weight_scaling_impl_type=ScalingImplType.STATS,
+        weight_scaling_stats_op=StatsOp.MAX,
+        weight_scaling_per_output_channel=True,
+        weight_restrict_scaling_type=RestrictValueType.LOG_FP,
+        weight_narrow_range=True,
+        weight_scaling_min_val=2e-16,
+    )
+    weight_tensor = gen_finn_dt_tensor(DataType.INT4, w_shape)
+    b_conv.weight = torch.nn.Parameter(torch.from_numpy(weight_tensor).float())
+
+    bo.export_finn_onnx(b_conv, ishape, export_onnx_path)
+    model = ModelWrapper(export_onnx_path)
+    model = model.transform(InferShapes())
+    inp_tensor = np.random.uniform(low=-1.0, high=1.0, size=ishape).astype(np.float32)
+    idict = {model.graph.input[0].name: inp_tensor}
+    odict = oxe.execute_onnx(model, idict, True)
+    produced = odict[model.graph.output[0].name]
+    inp_tensor = torch.from_numpy(inp_tensor).float()
+    b_conv.eval()
+    expected = b_conv.forward(inp_tensor).detach().numpy()
+
+    assert np.isclose(produced, expected, atol=1e-3).all()
+    os.remove(export_onnx_path)
diff --git a/tests/brevitas/test_brevitas_avg_pool_export.py b/tests/brevitas/test_brevitas_avg_pool_export.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3d6c5dde7179bec8fe97e2a6c791afb5733514c
--- /dev/null
+++ b/tests/brevitas/test_brevitas_avg_pool_export.py
@@ -0,0 +1,101 @@
+import os
+
+import onnx  # noqa
+import torch
+import numpy as np
+import brevitas.onnx as bo
+from brevitas.nn import QuantAvgPool2d
+from brevitas.quant_tensor import pack_quant_tensor
+from brevitas.core.quant import QuantType
+from finn.core.modelwrapper import ModelWrapper
+from finn.core.datatype import DataType
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.util.basic import gen_finn_dt_tensor
+import finn.core.onnx_exec as oxe
+
+import pytest
+
+export_onnx_path = "test_brevitas_avg_pool_export.onnx"
+
+
+@pytest.mark.parametrize("kernel_size", [2, 3])
+@pytest.mark.parametrize("stride", [1, 2])
+@pytest.mark.parametrize("signed", [False, True])
+@pytest.mark.parametrize("bit_width", [2, 4])
+@pytest.mark.parametrize("input_bit_width", [4, 8, 16])
+@pytest.mark.parametrize("channels", [2, 4])
+@pytest.mark.parametrize("idim", [7, 8])
+def test_brevitas_avg_pool_export(
+    kernel_size, stride, signed, bit_width, input_bit_width, channels, idim
+):
+    ishape = (1, channels, idim, idim)
+    ibw_tensor = torch.Tensor([input_bit_width])
+
+    b_avgpool = QuantAvgPool2d(
+        kernel_size=kernel_size,
+        stride=stride,
+        bit_width=bit_width,
+        quant_type=QuantType.INT,
+    )
+    # call forward pass manually once to cache scale factor and bitwidth
+    input_tensor = torch.from_numpy(np.zeros(ishape)).float()
+    scale = np.ones((1, channels, 1, 1))
+    output_scale = torch.from_numpy(scale).float()
+    input_quant_tensor = pack_quant_tensor(
+        tensor=input_tensor, scale=output_scale, bit_width=ibw_tensor, signed=signed
+    )
+    bo.export_finn_onnx(b_avgpool, ishape, export_onnx_path, input_t=input_quant_tensor)
+    model = ModelWrapper(export_onnx_path)
+
+    # determine input FINN datatype
+    if signed is True:
+        prefix = "INT"
+    else:
+        prefix = "UINT"
+    dt_name = prefix + str(input_bit_width)
+    dtype = DataType[dt_name]
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+
+    # execution with input tensor using integers and scale = 1
+    # calculate golden output
+    inp = gen_finn_dt_tensor(dtype, ishape)
+    input_tensor = torch.from_numpy(inp).float()
+    input_quant_tensor = pack_quant_tensor(
+        tensor=input_tensor, scale=output_scale, bit_width=ibw_tensor, signed=signed
+    )
+    b_avgpool.eval()
+    expected = b_avgpool.forward(input_quant_tensor).tensor.detach().numpy()
+
+    # finn execution
+    idict = {model.graph.input[0].name: inp}
+    odict = oxe.execute_onnx(model, idict, True)
+    produced = odict[model.graph.output[0].name]
+    assert (expected == produced).all()
+
+    # execution with input tensor using float and scale != 1
+    scale = np.random.uniform(low=0, high=1, size=(1, channels, 1, 1)).astype(
+        np.float32
+    )
+    inp_tensor = inp * scale
+    input_tensor = torch.from_numpy(inp_tensor).float()
+    input_scale = torch.from_numpy(scale).float()
+    input_quant_tensor = pack_quant_tensor(
+        tensor=input_tensor, scale=input_scale, bit_width=ibw_tensor, signed=signed
+    )
+    # export again to set the scale values correctly
+    bo.export_finn_onnx(b_avgpool, ishape, export_onnx_path, input_t=input_quant_tensor)
+    model = ModelWrapper(export_onnx_path)
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    b_avgpool.eval()
+    expected = b_avgpool.forward(input_quant_tensor).tensor.detach().numpy()
+    # finn execution
+    idict = {model.graph.input[0].name: inp_tensor}
+    odict = oxe.execute_onnx(model, idict, True)
+    produced = odict[model.graph.output[0].name]
+
+    assert np.isclose(expected, produced).all()
+
+    os.remove(export_onnx_path)
diff --git a/tests/brevitas/test_brevitas_cnv.py b/tests/brevitas/test_brevitas_cnv.py
index c04e16ad1923609c81240235057cc7a190c90ffb..120c67646de08a1a9875b76bedd3a0130792b487 100644
--- a/tests/brevitas/test_brevitas_cnv.py
+++ b/tests/brevitas/test_brevitas_cnv.py
@@ -38,11 +38,10 @@ import finn.core.onnx_exec as oxe
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.fold_constants import FoldConstants
 from finn.transformation.infer_shapes import InferShapes
-from finn.transformation.general import GiveUniqueNodeNames
-from finn.transformation.double_to_single_float import DoubleToSingleFloat
+from finn.transformation.general import GiveUniqueNodeNames, RemoveStaticGraphInputs
 from finn.util.test import get_test_model_trained
 
-export_onnx_path = "test_output_cnv.onnx"
+export_onnx_path = "test_brevitas_cnv.onnx"
 
 
 @pytest.mark.parametrize("abits", [1, 2])
@@ -54,9 +53,11 @@ def test_brevitas_cnv_export_exec(wbits, abits):
     bo.export_finn_onnx(cnv, (1, 3, 32, 32), export_onnx_path)
     model = ModelWrapper(export_onnx_path)
     model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(DoubleToSingleFloat())
     model = model.transform(InferShapes())
     model = model.transform(FoldConstants())
+    model = model.transform(RemoveStaticGraphInputs())
+    assert len(model.graph.input) == 1
+    assert len(model.graph.output) == 1
     fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz")
     input_tensor = np.load(fn)["arr_0"].astype(np.float32)
     input_tensor = input_tensor / 255
diff --git a/tests/brevitas/test_brevitas_debug.py b/tests/brevitas/test_brevitas_debug.py
new file mode 100644
index 0000000000000000000000000000000000000000..50d0ca44cd0befe5d08b5c1b45edf602457bda19
--- /dev/null
+++ b/tests/brevitas/test_brevitas_debug.py
@@ -0,0 +1,79 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from pkgutil import get_data
+
+import os
+import brevitas.onnx as bo
+import numpy as np
+import onnx
+import onnx.numpy_helper as nph
+import torch
+
+import finn.core.onnx_exec as oxe
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fold_constants import FoldConstants
+from finn.transformation.general import RemoveStaticGraphInputs
+from finn.transformation.infer_shapes import InferShapes
+from finn.util.test import get_test_model_trained
+
+
+def test_brevitas_debug():
+    finn_onnx = "test_brevitas_debug.onnx"
+    fc = get_test_model_trained("TFC", 2, 2)
+    dbg_hook = bo.enable_debug(fc)
+    bo.export_finn_onnx(fc, (1, 1, 28, 28), finn_onnx)
+    model = ModelWrapper(finn_onnx)
+    model = model.transform(InferShapes())
+    model = model.transform(FoldConstants())
+    model = model.transform(RemoveStaticGraphInputs())
+    assert len(model.graph.input) == 1
+    assert len(model.graph.output) == 1
+    # load one of the test vectors
+    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
+    input_tensor = onnx.load_tensor_from_string(raw_i)
+    # run using FINN-based execution
+    input_dict = {"0": nph.to_array(input_tensor)}
+    output_dict = oxe.execute_onnx(model, input_dict, return_full_exec_context=True)
+    produced = output_dict[model.graph.output[0].name]
+    # run using PyTorch/Brevitas
+    input_tensor = torch.from_numpy(nph.to_array(input_tensor)).float()
+    assert input_tensor.shape == (1, 1, 28, 28)
+    # do forward pass in PyTorch/Brevitas
+    expected = fc.forward(input_tensor).detach().numpy()
+    assert np.isclose(produced, expected, atol=1e-3).all()
+    # check all tensors at debug markers
+    names_brevitas = set(dbg_hook.values.keys())
+    names_finn = set(output_dict.keys())
+    names_common = names_brevitas.intersection(names_finn)
+    assert len(names_common) == 16
+    for dbg_name in names_common:
+        tensor_pytorch = dbg_hook.values[dbg_name].detach().numpy()
+        tensor_finn = output_dict[dbg_name]
+        assert np.isclose(tensor_finn, tensor_pytorch, atol=1e-5).all()
+    os.remove(finn_onnx)
diff --git a/tests/brevitas/test_brevitas_fc.py b/tests/brevitas/test_brevitas_fc.py
index db18d91e3590e896e111c9e38bdc4de43872a98c..9369b25385080875efcb286c02291fc579a15a34 100644
--- a/tests/brevitas/test_brevitas_fc.py
+++ b/tests/brevitas/test_brevitas_fc.py
@@ -39,6 +39,7 @@ import torch
 import finn.core.onnx_exec as oxe
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.fold_constants import FoldConstants
+from finn.transformation.general import RemoveStaticGraphInputs
 from finn.transformation.infer_shapes import InferShapes
 from finn.util.basic import make_build_dir
 from finn.util.test import get_test_model_trained
@@ -63,6 +64,9 @@ def test_brevitas_fc_onnx_export_and_exec(size, wbits, abits):
     model = ModelWrapper(finn_onnx)
     model = model.transform(InferShapes())
     model = model.transform(FoldConstants())
+    model = model.transform(RemoveStaticGraphInputs())
+    assert len(model.graph.input) == 1
+    assert len(model.graph.output) == 1
     # load one of the test vectors
     raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
     input_tensor = onnx.load_tensor_from_string(raw_i)
diff --git a/tests/brevitas/test_brevitas_act_export.py b/tests/brevitas/test_brevitas_non_scaled_QuantHardTanh_export.py
similarity index 75%
rename from tests/brevitas/test_brevitas_act_export.py
rename to tests/brevitas/test_brevitas_non_scaled_QuantHardTanh_export.py
index 08c4a99151d1105ad4258a8d7d6c19cc72da7a99..9c7296b7b3b6d36cfb43b6d9e96e7fba6bbce49a 100644
--- a/tests/brevitas/test_brevitas_act_export.py
+++ b/tests/brevitas/test_brevitas_non_scaled_QuantHardTanh_export.py
@@ -1,22 +1,32 @@
+import os
+import onnx  # noqa
 import numpy as np
 import torch
 import brevitas.onnx as bo
 from brevitas.nn import QuantHardTanh
 from brevitas.core.restrict_val import RestrictValueType
 from brevitas.core.scaling import ScalingImplType
-from models.common import get_quant_type
 import pytest
 from finn.core.modelwrapper import ModelWrapper
 import finn.core.onnx_exec as oxe
 from finn.transformation.infer_shapes import InferShapes
+from brevitas.core.quant import QuantType
 
-export_onnx_path = "test_act.onnx"
+export_onnx_path = "test_brevitas_non_scaled_QuantHardTanh_export.onnx"
 
 
 @pytest.mark.parametrize("abits", [1, 2, 4, 8])
 @pytest.mark.parametrize("narrow_range", [False, True])
 @pytest.mark.parametrize("max_val", [1.0, 1 - 2 ** (-7)])
-def test_brevitas_act_export(abits, narrow_range, max_val):
+def test_brevitas_act_export_qhardtanh_nonscaled(abits, narrow_range, max_val):
+    def get_quant_type(bit_width):
+        if bit_width is None:
+            return QuantType.FP
+        elif bit_width == 1:
+            return QuantType.BINARY
+        else:
+            return QuantType.INT
+
     act_quant_type = get_quant_type(abits)
     min_val = -1.0
     ishape = (1, 10)
@@ -41,3 +51,4 @@ def test_brevitas_act_export(abits, narrow_range, max_val):
     inp_tensor = torch.from_numpy(inp_tensor).float()
     expected = b_act.forward(inp_tensor).detach().numpy()
     assert np.isclose(produced, expected, atol=1e-3).all()
+    os.remove(export_onnx_path)
diff --git a/tests/brevitas/test_brevitas_relu_act_export.py b/tests/brevitas/test_brevitas_relu_act_export.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa114585d31fca629aa759e386aa3fbd04280a2e
--- /dev/null
+++ b/tests/brevitas/test_brevitas_relu_act_export.py
@@ -0,0 +1,127 @@
+import os
+import onnx  # noqa
+import numpy as np
+import torch
+import brevitas.onnx as bo
+from brevitas.nn import QuantReLU
+from brevitas.core.quant import QuantType
+from brevitas.core.restrict_val import RestrictValueType
+from brevitas.core.scaling import ScalingImplType
+import pytest
+from finn.core.modelwrapper import ModelWrapper
+import finn.core.onnx_exec as oxe
+from finn.transformation.infer_shapes import InferShapes
+
+export_onnx_path = "test_brevitas_relu_act_export.onnx"
+
+
+@pytest.mark.parametrize("abits", [2, 4, 8])
+@pytest.mark.parametrize("max_val", [1.0, 1.5, 1 - 2 ** (-7)])
+@pytest.mark.parametrize(
+    "scaling_impl_type", [ScalingImplType.CONST, ScalingImplType.PARAMETER]
+)
+def test_brevitas_act_export_relu(abits, max_val, scaling_impl_type):
+    min_val = -1.0
+    ishape = (1, 15)
+
+    b_act = QuantReLU(
+        bit_width=abits,
+        max_val=max_val,
+        scaling_impl_type=scaling_impl_type,
+        restrict_scaling_type=RestrictValueType.LOG_FP,
+        quant_type=QuantType.INT,
+    )
+    if scaling_impl_type == ScalingImplType.PARAMETER:
+        checkpoint = {
+            "act_quant_proxy.fused_activation_quant_proxy.tensor_quant.\
+scaling_impl.learned_value": torch.tensor(
+                0.49
+            ).type(
+                torch.FloatTensor
+            )
+        }
+        b_act.load_state_dict(checkpoint)
+
+    bo.export_finn_onnx(b_act, ishape, export_onnx_path)
+    model = ModelWrapper(export_onnx_path)
+    model = model.transform(InferShapes())
+    inp_tensor = np.random.uniform(low=min_val, high=max_val, size=ishape).astype(
+        np.float32
+    )
+    idict = {model.graph.input[0].name: inp_tensor}
+    odict = oxe.execute_onnx(model, idict, True)
+    produced = odict[model.graph.output[0].name]
+    inp_tensor = torch.from_numpy(inp_tensor).float()
+    b_act.eval()
+    expected = b_act.forward(inp_tensor).detach().numpy()
+    if not np.isclose(produced, expected, atol=1e-3).all():
+        print(abits, max_val, scaling_impl_type)
+        print("scale: ", b_act.quant_act_scale().type(torch.FloatTensor).detach())
+        if abits < 5:
+            print(
+                "thres:",
+                ", ".join(["{:8.4f}".format(x) for x in b_act.export_thres[0]]),
+            )
+        print("input:", ", ".join(["{:8.4f}".format(x) for x in inp_tensor[0]]))
+        print("prod :", ", ".join(["{:8.4f}".format(x) for x in produced[0]]))
+        print("expec:", ", ".join(["{:8.4f}".format(x) for x in expected[0]]))
+
+    assert np.isclose(produced, expected, atol=1e-3).all()
+    os.remove(export_onnx_path)
+
+
+@pytest.mark.parametrize("abits", [2, 4, 8])
+@pytest.mark.parametrize("max_val", [1.0, 1.5, 1 - 2 ** (-7)])
+@pytest.mark.parametrize("scaling_per_channel", [True, False])
+def test_brevitas_act_export_relu_imagenet(abits, max_val, scaling_per_channel):
+    out_channels = 32
+    ishape = (1, out_channels, 1, 1)
+    min_val = -1.0
+    b_act = QuantReLU(
+        bit_width=abits,
+        quant_type=QuantType.INT,
+        scaling_impl_type=ScalingImplType.PARAMETER,
+        scaling_per_channel=scaling_per_channel,
+        restrict_scaling_type=RestrictValueType.LOG_FP,
+        scaling_min_val=2e-16,
+        max_val=6.0,
+        return_quant_tensor=True,
+        per_channel_broadcastable_shape=(1, out_channels, 1, 1),
+    )
+    if scaling_per_channel is True:
+        rand_tensor = (2) * torch.rand((1, out_channels, 1, 1))
+    else:
+        rand_tensor = torch.tensor(1.2398)
+    checkpoint = {
+        "act_quant_proxy.fused_activation_quant_proxy.tensor_quant.\
+scaling_impl.learned_value": rand_tensor.type(
+            torch.FloatTensor
+        )
+    }
+    b_act.load_state_dict(checkpoint)
+    bo.export_finn_onnx(b_act, ishape, export_onnx_path)
+    model = ModelWrapper(export_onnx_path)
+    model = model.transform(InferShapes())
+    inp_tensor = np.random.uniform(low=min_val, high=max_val, size=ishape).astype(
+        np.float32
+    )
+    idict = {model.graph.input[0].name: inp_tensor}
+    odict = oxe.execute_onnx(model, idict, True)
+    produced = odict[model.graph.output[0].name]
+    inp_tensor = torch.from_numpy(inp_tensor).float()
+    b_act.eval()
+    expected = b_act.forward(inp_tensor).tensor.detach().numpy()
+    if not np.isclose(produced, expected, atol=1e-3).all():
+        print(abits, max_val)
+        print("scale: ", b_act.quant_act_scale().type(torch.FloatTensor).detach())
+        if abits < 5:
+            print(
+                "thres:",
+                ", ".join(["{:8.4f}".format(x) for x in b_act.export_thres[0]]),
+            )
+        print("input:", ", ".join(["{:8.4f}".format(x) for x in inp_tensor[0]]))
+        print("prod :", ", ".join(["{:8.4f}".format(x) for x in produced[0]]))
+        print("expec:", ", ".join(["{:8.4f}".format(x) for x in expected[0]]))
+
+    assert np.isclose(produced, expected, atol=1e-3).all()
+    os.remove(export_onnx_path)
diff --git a/tests/brevitas/test_brevitas_scaled_QHardTanh_export.py b/tests/brevitas/test_brevitas_scaled_QHardTanh_export.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0ec82ebed44e2e984be9f62e02bc1721a7f9c33
--- /dev/null
+++ b/tests/brevitas/test_brevitas_scaled_QHardTanh_export.py
@@ -0,0 +1,93 @@
+import onnx  # noqa
+import os
+import numpy as np
+import torch
+import brevitas.onnx as bo
+from brevitas.nn import QuantHardTanh
+from brevitas.core.restrict_val import RestrictValueType
+from brevitas.core.quant import QuantType
+from brevitas.core.scaling import ScalingImplType
+import pytest
+from finn.core.modelwrapper import ModelWrapper
+import finn.core.onnx_exec as oxe
+from finn.transformation.infer_shapes import InferShapes
+
+export_onnx_path = "test_brevitas_scaled_QHardTanh_export.onnx"
+
+
+@pytest.mark.parametrize("abits", [2, 4, 8])
+@pytest.mark.parametrize("narrow_range", [False, True])
+@pytest.mark.parametrize("min_val", [-1.0, -(1 - 2 ** (-7)), -2])
+@pytest.mark.parametrize("max_val", [1.0, 1 - 2 ** (-7), 2])
+@pytest.mark.parametrize(
+    "scaling_impl_type", [ScalingImplType.CONST, ScalingImplType.PARAMETER]
+)
+def test_brevitas_act_export_qhardtanh_scaled(
+    abits, narrow_range, min_val, max_val, scaling_impl_type
+):
+    def get_quant_type(bit_width):
+        if bit_width is None:
+            return QuantType.FP
+        elif bit_width == 1:
+            return QuantType.BINARY
+        else:
+            return QuantType.INT
+
+    act_quant_type = get_quant_type(abits)
+    ishape = (1, 15)
+    b_act = QuantHardTanh(
+        bit_width=abits,
+        quant_type=act_quant_type,
+        max_val=max_val,
+        min_val=min_val,
+        restrict_scaling_type=RestrictValueType.LOG_FP,
+        scaling_impl_type=scaling_impl_type,
+        narrow_range=narrow_range,
+    )
+    if scaling_impl_type == ScalingImplType.PARAMETER:
+        checkpoint = {
+            "act_quant_proxy.fused_activation_quant_proxy.\
+tensor_quant.scaling_impl.learned_value": torch.tensor(
+                0.49
+            ).type(
+                torch.FloatTensor
+            )
+        }
+        b_act.load_state_dict(checkpoint)
+
+    bo.export_finn_onnx(b_act, ishape, export_onnx_path)
+    model = ModelWrapper(export_onnx_path)
+    model = model.transform(InferShapes())
+    inp_tensor = np.random.uniform(low=min_val, high=max_val, size=ishape).astype(
+        np.float32
+    )
+    idict = {model.graph.input[0].name: inp_tensor}
+    odict = oxe.execute_onnx(model, idict, True)
+    produced = odict[model.graph.output[0].name]
+    inp_tensor = torch.from_numpy(inp_tensor).float()
+    b_act.eval()
+    expected = b_act.forward(inp_tensor).detach().numpy()
+    if not np.isclose(produced, expected, atol=1e-3).all():
+        print(
+            "abits: ",
+            abits,
+            " | narrow_range: ",
+            narrow_range,
+            " | min_val: ",
+            min_val,
+            " | max_val: ",
+            max_val,
+        )
+        print("layer scale: ", b_act.quant_act_scale().type(torch.FloatTensor).detach())
+        print("export scale: ", b_act.export_act_scale)
+        if abits < 5:
+            print(
+                "thres:",
+                ", ".join(["{:8.4f}".format(x) for x in b_act.export_thres[0]]),
+            )
+        print("input:", ", ".join(["{:8.4f}".format(x) for x in inp_tensor[0]]))
+        print("prod :", ", ".join(["{:8.4f}".format(x) for x in produced[0]]))
+        print("expec:", ", ".join(["{:8.4f}".format(x) for x in expected[0]]))
+
+    assert np.isclose(produced, expected, atol=1e-3).all()
+    os.remove(export_onnx_path)
diff --git a/tests/core/test_basic_onnx_exec.py b/tests/core/test_basic_onnx_exec.py
index a7b6da9965aa5912870812a8c1f8d6da2ee0d181..ddb2cbfc40c7647970f0c51ecb95340e7d1dddae 100644
--- a/tests/core/test_basic_onnx_exec.py
+++ b/tests/core/test_basic_onnx_exec.py
@@ -35,6 +35,8 @@ import onnx.numpy_helper as np_helper
 import finn.core.onnx_exec as oxe
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.infer_shapes import InferShapes
+from finn.core.datatype import DataType
+from finn.util.basic import gen_finn_dt_tensor
 
 
 def test_mnist_onnx_download_extract_run():
@@ -47,9 +49,50 @@ def test_mnist_onnx_download_extract_run():
     raw_o = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/output_0.pb")
     input_tensor = onnx.load_tensor_from_string(raw_i)
     output_tensor = onnx.load_tensor_from_string(raw_o)
-    # run using FINN-based execution
+    # run using FINN-based execution (full graph)
     input_dict = {"Input3": np_helper.to_array(input_tensor)}
-    output_dict = oxe.execute_onnx(model, input_dict)
+    output_dict = oxe.execute_onnx(model, input_dict, return_full_exec_context=True)
     assert np.isclose(
         np_helper.to_array(output_tensor), output_dict["Plus214_Output_0"], atol=1e-3
     ).all()
+    # test subgraph execution
+    start_node = model.graph.node[1]
+    end_node = model.graph.node[3]
+    subgraph_i_dict = {start_node.input[0]: output_dict[start_node.input[0]]}
+    subgraph_o_dict = oxe.execute_onnx(
+        model,
+        subgraph_i_dict,
+        return_full_exec_context=True,
+        start_node=start_node,
+        end_node=end_node,
+    )
+    assert np.isclose(
+        subgraph_o_dict[end_node.output[0]], output_dict[end_node.output[0]], atol=1e-3
+    ).all()
+
+
+def test_onnx_exec_internal_rounding():
+    inp0 = onnx.helper.make_tensor_value_info("inp0", onnx.TensorProto.FLOAT, [2, 2])
+    inp1 = onnx.helper.make_tensor_value_info("inp1", onnx.TensorProto.FLOAT, [1])
+    outp = onnx.helper.make_tensor_value_info("outp", onnx.TensorProto.FLOAT, [2, 2])
+    mul_node = onnx.helper.make_node("Mul", inputs=["inp0", "inp1"], outputs=["outp"])
+    graph = onnx.helper.make_graph(
+        nodes=[mul_node], name="mul_graph", inputs=[inp0, inp1], outputs=[outp]
+    )
+
+    model = onnx.helper.make_model(graph, producer_name="mul-model")
+    model = ModelWrapper(model)
+    idt = DataType.INT2
+    model.set_tensor_datatype("inp0", idt)
+    model.set_tensor_datatype("inp1", idt)
+    model.transform(InferShapes())
+
+    mul_value = np.asarray([-1], dtype=np.float32)
+    inp_int = gen_finn_dt_tensor(idt, [2, 2])
+    scale = np.random.uniform(low=0, high=1, size=(2, 2)).astype(np.float32)
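+    # multiplying and dividing by (nearly) the same scale yields values that
+    # are only approximately integer; since inp0 is annotated as INT2, the
+    # executor is expected to round them back to exact integers before the Mul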
+    inp_rounded = (inp_int * scale) / (scale + 1e-7)
+    input_dict = {"inp0": inp_rounded, "inp1": mul_value}
+    output_dict = oxe.execute_onnx(model, input_dict)
+    produced = output_dict["outp"]
+    expected = np.multiply(inp_int, mul_value)
+    assert (produced == expected).all()
diff --git a/tests/core/test_modelwrapper.py b/tests/core/test_modelwrapper.py
index 942eda19ca4c2cdbded9f906a5e7772f50acbd6e..0fb7ae42f3bd556755f81a02be6c71fd73ffc519 100644
--- a/tests/core/test_modelwrapper.py
+++ b/tests/core/test_modelwrapper.py
@@ -27,15 +27,16 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import os
+import onnx
 from collections import Counter
-
 import brevitas.onnx as bo
 import numpy as np
+import finn.core.data_layout as DataLayout
 
 from finn.core.modelwrapper import ModelWrapper
 from finn.util.test import get_test_model_trained
 
-export_onnx_path = "test_output_lfc.onnx"
+export_onnx_path = "test_modelwrapper.onnx"
 
 
 def test_modelwrapper():
@@ -66,5 +67,110 @@ def test_modelwrapper():
     inp_cons = model.find_consumer(l0_inp_tensor_name)
     assert inp_cons.op_type == "MatMul"
     out_prod = model.find_producer(l0_inp_tensor_name)
-    assert out_prod.op_type == "Sign"
+    assert out_prod.op_type == "MultiThreshold"
+    inp_layout = model.get_tensor_layout(inp_name)
+    assert inp_layout is None
+    inp_layout = DataLayout.NCHW
+    model.set_tensor_layout(inp_name, inp_layout)
+    assert model.get_tensor_layout(inp_name) == inp_layout
+    inp_sparsity = model.get_tensor_sparsity(inp_name)
+    assert inp_sparsity is None
+    inp_sparsity = {"dw": {"kernel_shape": 3}}
+    model.set_tensor_sparsity(inp_name, inp_sparsity)
+    assert model.get_tensor_sparsity(inp_name) == inp_sparsity
     os.remove(export_onnx_path)
+
+
+def test_modelwrapper_graph_order():
+    # create small network with properties to be tested
+    Neg_node = onnx.helper.make_node("Neg", inputs=["in1"], outputs=["neg1"])
+    Round_node = onnx.helper.make_node("Round", inputs=["neg1"], outputs=["round1"])
+
+    Ceil_node = onnx.helper.make_node("Ceil", inputs=["neg1"], outputs=["ceil1"])
+    Add_node = onnx.helper.make_node(
+        "Add", inputs=["round1", "ceil1"], outputs=["out1"]
+    )
+
+    in1 = onnx.helper.make_tensor_value_info("in1", onnx.TensorProto.FLOAT, [4, 4])
+    out1 = onnx.helper.make_tensor_value_info("out1", onnx.TensorProto.FLOAT, [4, 4])
+
+    graph = onnx.helper.make_graph(
+        nodes=[Neg_node, Round_node, Ceil_node, Add_node],
+        name="simple_graph",
+        inputs=[in1],
+        outputs=[out1],
+        value_info=[
+            onnx.helper.make_tensor_value_info("neg1", onnx.TensorProto.FLOAT, [4, 4]),
+            onnx.helper.make_tensor_value_info(
+                "round1", onnx.TensorProto.FLOAT, [4, 4]
+            ),
+            onnx.helper.make_tensor_value_info("ceil1", onnx.TensorProto.FLOAT, [4, 4]),
+        ],
+    )
+
+    onnx_model = onnx.helper.make_model(graph, producer_name="simple-model")
+    model = ModelWrapper(onnx_model)
+
+    # test graph order functions
+    assert model.find_consumers("in1") == [Neg_node]
+    assert model.find_consumers("neg1") == [Round_node, Ceil_node]
+    assert model.find_consumers("round1") == [Add_node]
+    assert model.find_consumers("ceil1") == [Add_node]
+    assert model.find_consumers("out1") is None
+
+    assert model.find_direct_successors(Neg_node) == [Round_node, Ceil_node]
+    assert model.find_direct_successors(Round_node) == [Add_node]
+    assert model.find_direct_successors(Ceil_node) == [Add_node]
+    assert model.find_direct_successors(Add_node) is None
+
+    assert model.find_direct_predecessors(Neg_node) is None
+    assert model.find_direct_predecessors(Round_node) == [Neg_node]
+    assert model.find_direct_predecessors(Ceil_node) == [Neg_node]
+    assert model.find_direct_predecessors(Add_node) == [Round_node, Ceil_node]
+
+    assert model.get_node_index(Neg_node) == 0
+    assert model.get_node_index(Round_node) == 1
+    assert model.get_node_index(Ceil_node) == 2
+    assert model.get_node_index(Add_node) == 3
+
+
+def test_modelwrapper_detect_forks_n_joins():
+    # create small network with properties to be tested
+    Neg_node = onnx.helper.make_node("Neg", inputs=["in1"], outputs=["neg1"])
+    Round_node = onnx.helper.make_node("Round", inputs=["neg1"], outputs=["round1"])
+
+    Ceil_node = onnx.helper.make_node("Ceil", inputs=["neg1"], outputs=["ceil1"])
+    Add_node = onnx.helper.make_node(
+        "Add", inputs=["round1", "ceil1"], outputs=["out1"]
+    )
+
+    in1 = onnx.helper.make_tensor_value_info("in1", onnx.TensorProto.FLOAT, [4, 4])
+    out1 = onnx.helper.make_tensor_value_info("out1", onnx.TensorProto.FLOAT, [4, 4])
+
+    graph = onnx.helper.make_graph(
+        nodes=[Neg_node, Round_node, Ceil_node, Add_node],
+        name="simple_graph",
+        inputs=[in1],
+        outputs=[out1],
+        value_info=[
+            onnx.helper.make_tensor_value_info("neg1", onnx.TensorProto.FLOAT, [4, 4]),
+            onnx.helper.make_tensor_value_info(
+                "round1", onnx.TensorProto.FLOAT, [4, 4]
+            ),
+            onnx.helper.make_tensor_value_info("ceil1", onnx.TensorProto.FLOAT, [4, 4]),
+        ],
+    )
+
+    onnx_model = onnx.helper.make_model(graph, producer_name="simple-model")
+    model = ModelWrapper(onnx_model)
+
+    # test
+    assert model.is_fork_node(Neg_node)
+    assert not model.is_fork_node(Round_node)
+    assert not model.is_fork_node(Ceil_node)
+    assert not model.is_fork_node(Add_node)
+
+    assert not model.is_join_node(Neg_node)
+    assert not model.is_join_node(Round_node)
+    assert not model.is_join_node(Ceil_node)
+    assert model.is_join_node(Add_node)
diff --git a/tests/custom_op/test_multi_thresholding.py b/tests/custom_op/test_multithreshold.py
similarity index 61%
rename from tests/custom_op/test_multi_thresholding.py
rename to tests/custom_op/test_multithreshold.py
index 4f2b08675fdabb1bda49972c51892da92e1a0cdc..7e6ad4fe08517290dd22a2c74b2847d007b74b1f 100644
--- a/tests/custom_op/test_multi_thresholding.py
+++ b/tests/custom_op/test_multithreshold.py
@@ -27,11 +27,76 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import numpy as np
-
+import time
 from finn.custom_op.multithreshold import multithreshold
 
 
-def test_execute_multi_thresholding():
+def compare(x, y):
+    """Comparison helper function for multithresholding.
+
+    Takes two values and returns 1.0 if x >= y, otherwise 0.0."""
+    if x >= y:
+        return 1.0
+    else:
+        return 0.0
+
+# naive implementation of thresholding for performance comparison
+def multithreshold_elementwise(v, thresholds, out_scale=None, out_bias=None):
+    """Given a set of threshold values t={t_0, t_1 ... t_n} the successive
+    thresholding maps any real number x to an integer in the interval [0, n],
+    where the returned integer is the number of thresholds x is greater than
+    or equal to.
+
+    The output tensor will be scaled by out_scale and biased by out_bias."""
+    # the inputs are expected to be in the shape (N,C,H,W) or (N, C)
+    # the MultiThreshold node supports a data_layout attribute that can be set
+    # to 'NHWC' to support (N,H,W,C) data layout mode for in-out as well
+    # N : Batch size
+    # C : Number of channels
+    # H : Height of the input images
+    # W : Width of the input images
+    #
+    # the thresholds are expected to be in the shape (C, B)
+    # C : Number of channels (must be the same value as C in input tensor
+    #     or 1 if all channels use the same threshold value)
+    # B : Desired activation steps => i.e. for 4-bit activation,
+    #     B=7 (2^(n)-1 and n=4)
+    # the output tensor will be scaled by out_scale and biased by out_bias
+    # assert threshold shape
+    is_global_threshold = thresholds.shape[0] == 1
+    assert (
+        v.shape[1] == thresholds.shape[0]
+    ) or is_global_threshold, """Threshold
+    shape incorrect"""
+    # save the required shape sizes for the loops (N, C and B)
+    num_batch = v.shape[0]
+    num_channel = v.shape[1]
+    num_act = thresholds.shape[1]
+    # reshape inputs to enable channel-wise reading
+    vr = v.reshape((v.shape[0], v.shape[1], -1))
+    # save the new shape size of the images
+    num_img_elem = vr.shape[2]
+    # initiate output tensor
+    ret = np.zeros_like(vr)
+    # iterate over thresholds channel-wise
+    for t in range(num_channel):
+        channel_thresh = thresholds[0] if is_global_threshold else thresholds[t]
+        # iterate over batches
+        for b in range(num_batch):
+            # iterate over image elements on which the thresholds will be applied
+            for elem in range(num_img_elem):
+                # iterate over the different thresholds for one channel
+                for a in range(num_act):
+                    # apply successive thresholding to every element
+                    ret[b][t][elem] += compare(vr[b][t][elem], channel_thresh[a])
+    if out_scale is None:
+        out_scale = 1.0
+    if out_bias is None:
+        out_bias = 0.0
+    return out_scale * ret.reshape(v.shape) + out_bias
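+
+# Worked example (illustrative): with thresholds t = [0.1, 0.5, 0.9], an input
+# value of 0.6 is >= 0.1 and >= 0.5 but < 0.9, so it maps to 2; with
+# out_scale=2.0 and out_bias=-1.0 the reported output would be 2 * 2 - 1 = 3.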
+
+
+def test_multithreshold():
 
     inputs = np.ndarray(
         shape=(6, 3, 2, 2),
@@ -223,9 +288,35 @@ def test_execute_multi_thresholding():
     )
 
     results = multithreshold(inputs, thresholds)
-
     assert (results == outputs).all()
 
     results_scaled = multithreshold(inputs, thresholds, 2.0, -1.0)
     outputs_scaled = 2.0 * outputs - 1.0
     assert (results_scaled == outputs_scaled).all()
+
+    # performance and random test
+    np.random.seed(0)
+    inputs = np.random.random((1, 256, 64, 64))
+    thresholds = (np.array([[1, 2, 3, 4, 5, 6]]) - 0.5) / 6
+
+    before = time.time()
+    vec_results = multithreshold(inputs, thresholds)
+    after = time.time()
+    vector_runtime = after - before
+
+    before = time.time()
+    nonvec_results = multithreshold_elementwise(inputs, thresholds)
+    after = time.time()
+    non_vector_runtime = after - before
+
+    assert (vec_results == nonvec_results).all()
+
+    return vector_runtime, non_vector_runtime
+
+
+if __name__ == "__main__":
+    vector_runtime, non_vector_runtime = test_multithreshold()
+
+    print("Runtime non-vectorized: ", non_vector_runtime, "s")
+    print("Runtime vectorized: ", vector_runtime, "s")
+    print("Speed-up: ", non_vector_runtime / vector_runtime)
diff --git a/tests/custom_op/test_xnorpopcountmatmul.py b/tests/custom_op/test_xnorpopcountmatmul.py
index 6b59283667ac05f569e5c3d80dbfc1530616d045..745b782d418129d96e21c327a49de04d53aa7c48 100644
--- a/tests/custom_op/test_xnorpopcountmatmul.py
+++ b/tests/custom_op/test_xnorpopcountmatmul.py
@@ -47,11 +47,7 @@ from finn.transformation.infer_shapes import InferShapes
 from finn.transformation.streamline.sign_to_thres import ConvertSignToThres
 from finn.util.test import get_test_model_trained
 
-export_onnx_path = "test_output_lfc.onnx"
-# TODO get from config instead, hardcoded to Docker path for now
-trained_lfc_checkpoint = (
-    "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar"
-)
+export_onnx_path = "test_xnorpopcountmatmul.onnx"
 
 
 def test_xnorpopcountmatmul():
diff --git a/tests/end2end/test_end2end_bnn_pynq.py b/tests/end2end/test_end2end_bnn_pynq.py
new file mode 100644
index 0000000000000000000000000000000000000000..4eed1a260974e4f842e9e93756caff135c5fbdde
--- /dev/null
+++ b/tests/end2end/test_end2end_bnn_pynq.py
@@ -0,0 +1,651 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import pytest
+
+import numpy as np
+
+# as of Feb'20 there is a bug that segfaults ONNX shape inference if we
+# import pytorch before onnx, so we make sure to import onnx first
+import onnx  # NOQA
+import torch
+import brevitas.onnx as bo
+
+import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
+import finn.transformation.streamline.absorb as absorb
+from finn.core.onnx_exec import execute_onnx
+from finn.custom_op.registry import getCustomOp
+from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount
+from finn.transformation.fold_constants import FoldConstants
+
+from finn.transformation.fpgadataflow.create_dataflow_partition import (
+    CreateDataflowPartition,
+)
+from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
+from finn.transformation.general import (
+    RemoveUnusedTensors,
+    RemoveStaticGraphInputs,
+    GiveReadableTensorNames,
+    GiveUniqueNodeNames,
+)
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.streamline import Streamline
+from finn.util.test import (
+    get_build_env,
+    load_test_checkpoint_or_skip,
+    get_example_input,
+    get_trained_network_and_ishape,
+    execute_parent,
+    get_topk,
+)
+from finn.transformation.fpgadataflow.annotate_resources import AnnotateResources
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.move_reshape import RemoveCNVtoFCFlatten
+from finn.transformation.lower_convs_to_matmul import LowerConvsToMatMul
+from finn.transformation.streamline.reorder import (
+    MakeMaxPoolNHWC,
+    MoveScalarLinearPastInvariants,
+)
+import warnings
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
+from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO
+from finn.transformation.fpgadataflow.annotate_cycles import AnnotateCycles
+from finn.analysis.fpgadataflow.dataflow_performance import dataflow_performance
+from finn.core.modelwrapper import ModelWrapper
+from scipy.stats import linregress
+from finn.core.throughput_test import throughput_test_remote, throughput_test_rtlsim
+from finn.util.pytorch import ToTensor
+from finn.transformation.merge_onnx_models import MergeONNXModels
+from finn.transformation.insert_topk import InsertTopK
+from finn.core.datatype import DataType
+from dataset_loading import mnist, cifar
+from datetime import datetime
+import subprocess
+from finn.util.gdrive import upload_to_end2end_dashboard
+from collections import OrderedDict
+
+build_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
+target_clk_ns = 10
+mem_mode = "decoupled"
+rtlsim_trace = False
+
+
+def get_checkpoint_name(topology, wbits, abits, step):
+    return build_dir + "/end2end_%s_w%da%d_%s.onnx" % (topology, wbits, abits, step)
+
+
+def get_dashboard_data(topology, wbits, abits):
+    stats_file = build_dir + "/end2end_%s_w%da%d.txt" % (topology, wbits, abits)
+    stats_dict = OrderedDict()
+    if os.path.isfile(stats_file):
+        with open(stats_file, "r") as f:
+            stats_dict_txt = f.read()
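+        # the stats file holds a Python dict literal (as written by update_dashboard_data), so eval() turns it back into a dict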
+        stats_dict = eval(stats_dict_txt)
+    return stats_dict
+
+
+def update_dashboard_data(topology, wbits, abits, key, val):
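+    # read-modify-write a single key/value in the per-network stats file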
+    stats_dict = get_dashboard_data(topology, wbits, abits)
+    stats_dict[key] = val
+    stats_file = build_dir + "/end2end_%s_w%da%d.txt" % (topology, wbits, abits)
+    with open(stats_file, "w") as f:
+        f.write(str(stats_dict))
+
+
+def fold_tfc(model):
+    fc_layers = model.get_nodes_by_op_type("StreamingFCLayer_Batch")
+    # (PE, SIMD, in_fifo_depth, out_fifo_depth, ramstyle) for each layer
+    config = [
+        (16, 49, 16, 64, "block"),
+        (8, 8, 64, 64, "auto"),
+        (8, 8, 64, 64, "auto"),
+        (10, 8, 64, 10, "distributed"),
+    ]
+    for fcl, (pe, simd, ififo, ofifo, ramstyle) in zip(fc_layers, config):
+        fcl_inst = getCustomOp(fcl)
+        fcl_inst.set_nodeattr("PE", pe)
+        fcl_inst.set_nodeattr("SIMD", simd)
+        fcl_inst.set_nodeattr("inFIFODepth", ififo)
+        fcl_inst.set_nodeattr("outFIFODepth", ofifo)
+        fcl_inst.set_nodeattr("ram_style", ramstyle)
+    # set parallelism for input quantizer to be the same as the first layer's SIMD
+    inp_qnt_node = model.get_nodes_by_op_type("Thresholding_Batch")[0]
+    inp_qnt = getCustomOp(inp_qnt_node)
+    inp_qnt.set_nodeattr("PE", 49)
+    return model
+
+
+def fold_cnv_large(model):
+    fc_layers = model.get_nodes_by_op_type("StreamingFCLayer_Batch")
+    # each tuple is (PE, SIMD, in_fifo_depth) for a layer
+    folding = [
+        (16, 3, 256),
+        (32, 32, 256),
+        (16, 32, 256),
+        (16, 32, 256),
+        (4, 32, 214),
+        (1, 32, 2),
+        (1, 4, 126),
+        (1, 8, 62),
+        (5, 1, 6),
+    ]
+    for fcl, (pe, simd, ififodepth) in zip(fc_layers, folding):
+        fcl_inst = getCustomOp(fcl)
+        fcl_inst.set_nodeattr("PE", pe)
+        fcl_inst.set_nodeattr("SIMD", simd)
+        fcl_inst.set_nodeattr("inFIFODepth", ififodepth)
+
+    swg_layers = model.get_nodes_by_op_type("ConvolutionInputGenerator")
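+    # input FIFO depth for each sliding window generator (one entry per SWG layer)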
+    swg_idepth = [2, 51, 9, 106, 2, 2]
+    for i in range(len(swg_layers)):
+        swg_inst = getCustomOp(swg_layers[i])
+        simd = folding[i][1]
+        swg_inst.set_nodeattr("SIMD", simd)
+        swg_inst.set_nodeattr("inFIFODepth", swg_idepth[i])
+    return model
+
+
+def fold_cnv_small(model):
+    fc_layers = model.get_nodes_by_op_type("StreamingFCLayer_Batch")
+    # each tuple is (PE, SIMD, in_fifo_depth, ramstyle) for a layer
+    folding = [
+        (8, 3, 256, "auto"),
+        (16, 16, 256, "auto"),
+        (8, 16, 256, "auto"),
+        (8, 16, 256, "block"),
+        (4, 8, 214, "auto"),
+        (1, 8, 2, "auto"),
+        (1, 2, 126, "distributed"),
+        (2, 2, 62, "block"),
+        (5, 1, 6, "distributed"),
+    ]
+    for fcl, (pe, simd, ififodepth, ramstyle) in zip(fc_layers, folding):
+        fcl_inst = getCustomOp(fcl)
+        fcl_inst.set_nodeattr("PE", pe)
+        fcl_inst.set_nodeattr("SIMD", simd)
+        fcl_inst.set_nodeattr("inFIFODepth", ififodepth)
+        fcl_inst.set_nodeattr("ram_style", ramstyle)
+
+    swg_layers = model.get_nodes_by_op_type("ConvolutionInputGenerator")
+    swg_idepth = [2, 51, 9, 106, 2, 2]
+    for i in range(len(swg_layers)):
+        swg_inst = getCustomOp(swg_layers[i])
+        simd = folding[i][1]
+        swg_inst.set_nodeattr("SIMD", simd)
+        swg_inst.set_nodeattr("inFIFODepth", swg_idepth[i])
+    return model
+
+
+def get_folding_function(topology, wbits, abits):
+    if "tfc" in topology:
+        return fold_tfc
+    elif "cnv" in topology:
+        if wbits == 1 and abits == 1:
+            return fold_cnv_large
+        else:
+            return fold_cnv_small
+    else:
+        raise Exception("Unknown topology/quantization combo for predefined folding")
+
+
+def get_golden_io_pair(topology, wbits, abits, preproc=ToTensor(), return_topk=None):
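+    # run the trained Brevitas network on one example input to get the reference output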
+    (model, ishape) = get_trained_network_and_ishape(topology, wbits, abits)
+    input_tensor_npy = get_example_input(topology)
+    input_tensor_torch = torch.from_numpy(input_tensor_npy).float()
+    if preproc is not None:
+        input_tensor_torch = preproc.forward(input_tensor_torch).detach()
+    output_tensor_npy = model.forward(input_tensor_torch).detach().numpy()
+    if return_topk is not None:
+        output_tensor_npy = get_topk(output_tensor_npy, k=return_topk)
+    return (input_tensor_npy, output_tensor_npy)
+
+
+def measure_top1_accuracy(model_chkpt, dataset, parent_chkpt=None):
+    if dataset == "cifar10":
+        trainx, trainy, testx, testy, valx, valy = cifar.load_cifar_data(
+            "/workspace/finn/dataset", download=True, one_hot=False
+        )
+    elif dataset == "mnist":
+        trainx, trainy, testx, testy, valx, valy = mnist.load_mnist_data(
+            "/workspace/finn/dataset", download=True, one_hot=False
+        )
+    else:
+        raise Exception("Unrecognized dataset")
+    # move from dataset_loader layout to ONNX layout: NHWC -> NCHW
+    testx = testx.transpose(0, 3, 1, 2)
+    model = ModelWrapper(model_chkpt)
+    iname = model.graph.input[0].name
+    oname = model.graph.output[0].name
+    if parent_chkpt is None:
+        ishape = model.get_tensor_shape(iname)
+    else:
+        parent_model = ModelWrapper(parent_chkpt)
+        parent_iname = parent_model.graph.input[0].name
+        ishape = parent_model.get_tensor_shape(parent_iname)
+    ok = 0
+    nok = 0
+    n_batches = testx.shape[0]
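+    # note: one test image per iteration, so n_batches is really the number of samples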
+    for i in range(n_batches):
+        tdata = testx[i].reshape(ishape).astype(np.float32)
+        exp = testy[i].item()
+        if parent_chkpt is not None:
+            y = execute_parent(parent_chkpt, model_chkpt, tdata)
+        else:
+            y = execute_onnx(model, {iname: tdata}, False)[oname]
+        ret = y.item()
+        if ret == exp:
+            ok += 1
+        else:
+            nok += 1
+        if i % 10 == 0:
+            print("%d : OK %d NOK %d " % (i, ok, nok))
+    acc_top1 = ok * 100.0 / (ok + nok)
+    warnings.warn("Final OK %d NOK %d top-1 %f" % (ok, nok, acc_top1))
+    return acc_top1
+
+
+def topology2dataset(topology):
+    if "fc" in topology:
+        return "mnist"
+    elif "cnv" in topology:
+        return "cifar10"
+    else:
+        raise Exception("Unrecognized topology")
+
+
+@pytest.mark.parametrize("wbits", [1, 2])
+@pytest.mark.parametrize("abits", [1, 2])
+@pytest.mark.parametrize("topology", ["tfc", "cnv"])
+class TestEnd2End:
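+    # each stage loads the previous stage's checkpoint and skips if it is missing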
+    def test_export(self, topology, wbits, abits):
+        if wbits > abits:
+            pytest.skip("No wbits > abits end2end network configs for now")
+        (model, ishape) = get_trained_network_and_ishape(topology, wbits, abits)
+        chkpt_name = get_checkpoint_name(topology, wbits, abits, "export")
+        bo.export_finn_onnx(model, ishape, chkpt_name)
+        nname = "%s_w%da%d" % (topology, wbits, abits)
+        update_dashboard_data(topology, wbits, abits, "network", nname)
+        dtstr = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        update_dashboard_data(topology, wbits, abits, "datetime", dtstr)
+        finn_commit = subprocess.check_output(
+            ["git", "rev-parse", "HEAD"], cwd="/workspace/finn"
+        )
+        finn_commit = finn_commit.decode("utf-8").strip()
+        update_dashboard_data(topology, wbits, abits, "finn-commit", finn_commit)
+        assert os.path.isfile(chkpt_name)
+
+    def test_import_and_tidy(self, topology, wbits, abits):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "export")
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        model = model.transform(InferShapes())
+        model = model.transform(FoldConstants())
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(GiveReadableTensorNames())
+        model = model.transform(InferDataTypes())
+        model = model.transform(RemoveStaticGraphInputs())
+        chkpt = get_checkpoint_name(topology, wbits, abits, "import_and_tidy")
+        model.save(chkpt)
+
+    def test_add_pre_and_postproc(self, topology, wbits, abits):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "import_and_tidy")
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        global_inp_name = model.graph.input[0].name
+        ishape = model.get_tensor_shape(global_inp_name)
+        # preprocessing: torchvision's ToTensor divides uint8 inputs by 255
+        totensor_pyt = ToTensor()
+        chkpt_preproc_name = get_checkpoint_name(topology, wbits, abits, "preproc")
+        bo.export_finn_onnx(totensor_pyt, ishape, chkpt_preproc_name)
+        assert os.path.isfile(chkpt_preproc_name)
+        # join preprocessing and core model
+        pre_model = ModelWrapper(chkpt_preproc_name)
+        model = model.transform(MergeONNXModels(pre_model))
+        # add input quantization annotation: UINT8 for all BNN-PYNQ models
+        global_inp_name = model.graph.input[0].name
+        model.set_tensor_datatype(global_inp_name, DataType.UINT8)
+        # postprocessing: insert Top-1 node at the end
+        model = model.transform(InsertTopK(k=1))
+        chkpt_name = get_checkpoint_name(topology, wbits, abits, "pre_post")
+        # tidy-up again
+        model = model.transform(InferShapes())
+        model = model.transform(FoldConstants())
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(GiveReadableTensorNames())
+        model = model.transform(InferDataTypes())
+        model = model.transform(RemoveStaticGraphInputs())
+        model.save(chkpt_name)
+        assert os.path.isfile(chkpt_name)
+
+    def test_streamline(self, topology, wbits, abits):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "pre_post")
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        # move past any reshapes to be able to streamline input scaling
+        model = model.transform(MoveScalarLinearPastInvariants())
+        model = model.transform(Streamline())
+        if "fc" not in topology:
+            model = model.transform(LowerConvsToMatMul())
+            model = model.transform(MakeMaxPoolNHWC())
+            model = model.transform(absorb.AbsorbTransposeIntoMultiThreshold())
+        model = model.transform(ConvertBipolarMatMulToXnorPopcount())
+        model = model.transform(Streamline())
+        # absorb final add-mul nodes into TopK
+        model = model.transform(absorb.AbsorbScalarMulAddIntoTopK())
+        model = model.transform(InferDataLayouts())
+        model = model.transform(RemoveUnusedTensors())
+        model.save(get_checkpoint_name(topology, wbits, abits, "streamline"))
+
+    def test_convert_to_hls_layers(self, topology, wbits, abits):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "streamline")
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        # needed for bipolar MatMul layers
+        model = model.transform(to_hls.InferBinaryStreamingFCLayer(mem_mode))
+        # needed for non-bipolar MatMul layers
+        model = model.transform(to_hls.InferQuantizedStreamingFCLayer(mem_mode))
+        # TopK to LabelSelect
+        model = model.transform(to_hls.InferLabelSelectLayer())
+        # input quantization (if any) to standalone thresholding
+        model = model.transform(to_hls.InferThresholdingLayer())
+        # needed for convolutions
+        if "fc" not in topology:
+            model = model.transform(to_hls.InferConvInpGen())
+            model = model.transform(to_hls.InferStreamingMaxPool())
+            model = model.transform(RemoveCNVtoFCFlatten())
+        # get rid of Transpose -> Transpose identity seq
+        model = model.transform(absorb.AbsorbConsecutiveTransposes())
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(InferDataLayouts())
+        model.save(get_checkpoint_name(topology, wbits, abits, "convert_to_hls_layers"))
+
+    def test_create_dataflow_partition(self, topology, wbits, abits):
+        prev_chkpt_name = get_checkpoint_name(
+            topology, wbits, abits, "convert_to_hls_layers"
+        )
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        parent_model = model.transform(CreateDataflowPartition())
+        parent_model_chkpt = get_checkpoint_name(
+            topology, wbits, abits, "dataflow_parent"
+        )
+        parent_model.save(parent_model_chkpt)
+        sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
+        sdp_node = getCustomOp(sdp_node)
+        dataflow_model_filename = sdp_node.get_nodeattr("model")
+        dataflow_model = load_test_checkpoint_or_skip(dataflow_model_filename)
+        dataflow_model_chkpt = get_checkpoint_name(
+            topology, wbits, abits, "dataflow_model"
+        )
+        dataflow_model.save(dataflow_model_chkpt)
+
+    def test_fold(self, topology, wbits, abits):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "dataflow_model")
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        folding_fxn = get_folding_function(topology, wbits, abits)
+        model = folding_fxn(model)
+        model.save(get_checkpoint_name(topology, wbits, abits, "fold"))
+
+    @pytest.mark.slow
+    @pytest.mark.vivado
+    def test_cppsim(self, topology, wbits, abits):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "fold")
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        model = model.transform(PrepareCppSim())
+        model = model.transform(CompileCppSim())
+        model = model.transform(SetExecMode("cppsim"))
+        cppsim_chkpt = get_checkpoint_name(topology, wbits, abits, "cppsim")
+        model.save(cppsim_chkpt)
+        parent_chkpt = get_checkpoint_name(topology, wbits, abits, "dataflow_parent")
+        (input_tensor_npy, output_tensor_npy) = get_golden_io_pair(
+            topology, wbits, abits, return_topk=1
+        )
+        y = execute_parent(parent_chkpt, cppsim_chkpt, input_tensor_npy)
+        assert np.isclose(y, output_tensor_npy).all()
+
+    @pytest.mark.slow
+    @pytest.mark.vivado
+    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
+    def test_ipgen(self, topology, wbits, abits, kind):
+        if kind == "alveo" and ("VITIS_PATH" not in os.environ):
+            pytest.skip("VITIS_PATH not set")
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "fold")
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        test_fpga_part = get_build_env(kind, target_clk_ns)["part"]
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))
+        model = model.transform(HLSSynthIP())
+        model.save(get_checkpoint_name(topology, wbits, abits, "ipgen_" + kind))
+
+    @pytest.mark.slow
+    @pytest.mark.vivado
+    @pytest.mark.parametrize("kind", ["zynq"])
+    def test_ipstitch_rtlsim(self, topology, wbits, abits, kind):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "ipgen_" + kind)
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        test_fpga_part = get_build_env(kind, target_clk_ns)["part"]
+        model = model.transform(InsertDWC())
+        model = model.transform(InsertFIFO())
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(AnnotateCycles())
+        perf = model.analysis(dataflow_performance)
+        latency = perf["critical_path_cycles"]
+        model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))
+        model = model.transform(HLSSynthIP())
+        model = model.transform(CreateStitchedIP(test_fpga_part, target_clk_ns))
+        model = model.transform(PrepareRTLSim())
+        model.set_metadata_prop("exec_mode", "rtlsim")
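+        # allow ~10% margin over the estimated critical path before rtlsim is considered hung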
+        os.environ["LIVENESS_THRESHOLD"] = str(int(latency * 1.1))
+        if rtlsim_trace:
+            model.set_metadata_prop(
+                "rtlsim_trace", "%s_w%da%d.vcd" % (topology, wbits, abits)
+            )
+            os.environ["RTLSIM_TRACE_DEPTH"] = "3"
+        rtlsim_chkpt = get_checkpoint_name(
+            topology, wbits, abits, "ipstitch_rtlsim_" + kind
+        )
+        model.save(rtlsim_chkpt)
+        parent_chkpt = get_checkpoint_name(topology, wbits, abits, "dataflow_parent")
+        (input_tensor_npy, output_tensor_npy) = get_golden_io_pair(
+            topology, wbits, abits, return_topk=1
+        )
+        y = execute_parent(parent_chkpt, rtlsim_chkpt, input_tensor_npy)
+        model = ModelWrapper(rtlsim_chkpt)
+        perf["cycles_rtlsim"] = model.get_metadata_prop("cycles_rtlsim")
+        # warnings.warn("Estimated & rtlsim performance: " + str(perf))
+        # for (k, v) in perf.items():
+        #    update_dashboard_data(topology, wbits, abits, k, v)
+        update_dashboard_data(
+            topology, wbits, abits, "cycles_rtlsim", perf["cycles_rtlsim"]
+        )
+        assert np.isclose(y, output_tensor_npy).all()
+
+    @pytest.mark.slow
+    @pytest.mark.vivado
+    @pytest.mark.parametrize("kind", ["zynq"])
+    def test_throughput_rtlsim(self, topology, wbits, abits, kind):
+        prev_chkpt_name = get_checkpoint_name(
+            topology, wbits, abits, "ipstitch_rtlsim_" + kind
+        )
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        n_nodes = len(model.graph.node)
+        perf_est = model.analysis(dataflow_performance)
+        latency = int(model.get_metadata_prop("cycles_rtlsim"))
+        cycles_per_sample_est = perf_est["max_cycles"]
+        batchsize = 2 * n_nodes
+        ret = throughput_test_rtlsim(model, batchsize=batchsize)
+        res_cycles = ret["cycles"]
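+        # expected cycles: pipeline-fill latency + bottleneck cycles-per-sample * batch size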
+        est_cycles = latency + cycles_per_sample_est * batchsize
+        assert (abs(res_cycles - est_cycles) / res_cycles) < 0.15
+
+    @pytest.mark.slow
+    @pytest.mark.vivado
+    @pytest.mark.parametrize("kind", ["zynq"])
+    def test_validate_top1(self, topology, wbits, abits, kind):
+        if "TEST_END2END_VALIDATE_TOP1" not in os.environ:
+            pytest.skip("TEST_END2END_VALIDATE_TOP1 not set")
+        prepostproc_chkpt = get_checkpoint_name(topology, wbits, abits, "pre_post")
+        streamline_chkpt = get_checkpoint_name(topology, wbits, abits, "streamline")
+        parent_chkpt = get_checkpoint_name(topology, wbits, abits, "dataflow_parent")
+        cppsim_chkpt = get_checkpoint_name(topology, wbits, abits, "cppsim")
+        rtlsim_chkpt = get_checkpoint_name(
+            topology, wbits, abits, "ipstitch_rtlsim_" + kind
+        )
+        dataset = topology2dataset(topology)
+        assert measure_top1_accuracy(prepostproc_chkpt, dataset) > 80
+        assert measure_top1_accuracy(streamline_chkpt, dataset) > 80
+        assert measure_top1_accuracy(cppsim_chkpt, dataset, parent_chkpt) > 80
+        assert measure_top1_accuracy(rtlsim_chkpt, dataset, parent_chkpt) > 80
+
+    @pytest.mark.slow
+    @pytest.mark.vivado
+    @pytest.mark.vitis
+    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
+    def test_build(self, topology, wbits, abits, kind):
+        if kind == "alveo" and ("VITIS_PATH" not in os.environ):
+            pytest.skip("VITIS_PATH not set")
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "ipgen_" + kind)
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        cfg = get_build_env(kind, target_clk_ns)
+        model = model.transform(cfg["build_fxn"])
+        model = model.transform(AnnotateResources("synth"))
+        synth_dct = eval(model.get_metadata_prop("res_total_top_synth"))
+        for (k, v) in synth_dct.items():
+            update_dashboard_data(topology, wbits, abits, k, v)
+        update_dashboard_data(topology, wbits, abits, "board", cfg["board"])
+        model.save(get_checkpoint_name(topology, wbits, abits, "build_" + kind))
+
+    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
+    def test_deploy(self, topology, wbits, abits, kind):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "build_" + kind)
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)
+        cfg = get_build_env(kind, target_clk_ns)
+        if cfg["ip"] == "":
+            pytest.skip("PYNQ board IP address not specified")
+        model = model.transform(
+            DeployToPYNQ(
+                cfg["ip"],
+                cfg["port"],
+                cfg["username"],
+                cfg["password"],
+                cfg["target_dir"],
+            )
+        )
+        # save the model to be able to link it to the parent
+        model.save(get_checkpoint_name(topology, wbits, abits, "deploy_" + kind))
+
+    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
+    def test_run_on_hw(self, topology, wbits, abits, kind):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "deploy_" + kind)
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)  # NOQA
+        cfg = get_build_env(kind, target_clk_ns)
+        if cfg["ip"] == "":
+            pytest.skip("PYNQ board IP address not specified")
+        (input_tensor_npy, output_tensor_npy) = get_golden_io_pair(
+            topology, wbits, abits, return_topk=1
+        )
+        parent_model = load_test_checkpoint_or_skip(
+            get_checkpoint_name(topology, wbits, abits, "dataflow_parent")
+        )
+        iname = parent_model.graph.input[0].name
+        oname = parent_model.graph.output[0].name
+        sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
+        sdp_node = getCustomOp(sdp_node)
+        sdp_node.set_nodeattr("model", prev_chkpt_name)
+        ret = execute_onnx(parent_model, {iname: input_tensor_npy}, True)
+        y = ret[oname]
+        assert np.isclose(y, output_tensor_npy).all()
+
+    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
+    def test_throughput_hw(self, topology, wbits, abits, kind):
+        prev_chkpt_name = get_checkpoint_name(topology, wbits, abits, "deploy_" + kind)
+        end2end_example = "%s_w%da%d_%s" % (topology, wbits, abits, kind)
+        model = load_test_checkpoint_or_skip(prev_chkpt_name)  # NOQA
+        cfg = get_build_env(kind, target_clk_ns)
+        if cfg["ip"] == "":
+            pytest.skip("PYNQ board IP address not specified")
+        ret = dict()
+        # try a range of batch sizes, some may fail due to insufficient DMA
+        # buffers
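+        # (batch sizes tried: 1, 8, 64, 512, 4096)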
+        bsize_range_in = [8 ** i for i in range(5)]
+        bsize_range = []
+        for bsize in bsize_range_in:
+            res = throughput_test_remote(model, bsize)
+            if res is not None:
+                ret[bsize] = res
+                bsize_range.append(bsize)
+            else:
+                # assume we reached largest possible N
+                break
+        y = [ret[key]["runtime[ms]"] for key in bsize_range]
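+        # linear fit of runtime vs. batch size: slope ~ time per sample, intercept ~ invocation overhead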
+        lrret = linregress(bsize_range, y)
+        ret_str = ""
+        ret_str += "\n" + "%s Throughput Test Results" % end2end_example
+        ret_str += "\n" + "-----------------------------"
+        ret_str += "\n" + "From linear regression:"
+        ret_str += "\n" + "Invocation overhead: %f ms" % lrret.intercept
+        ret_str += "\n" + "Time per sample: %f ms" % lrret.slope
+        ret_str += "\n" + "Raw data:"
+
+        ret_str += "\n" + "{:<8} {:<16} {:<16} {:<16} {:<16} {:<16}".format(
+            "N", "runtime[ms]", "fclk[mhz]", "fps", "DRAM rd[Mb/s]", "DRAM wr[Mb/s]"
+        )
+        for k in bsize_range:
+            v = ret[k]
+            ret_str += "\n" + "{:<8} {:<16} {:<16} {:<16} {:<16} {:<16}".format(
+                k,
+                np.round(v["runtime[ms]"], 4),
+                v["fclk[mhz]"],
+                np.round(v["throughput[images/s]"], 2),
+                np.round(v["DRAM_in_bandwidth[Mb/s]"], 2),
+                np.round(v["DRAM_out_bandwidth[Mb/s]"], 2),
+            )
+        ret_str += "\n" + "-----------------------------"
+        warnings.warn(ret_str)
+        largest_bsize = bsize_range[-1]
+        update_dashboard_data(
+            topology, wbits, abits, "fclk[mhz]", ret[largest_bsize]["fclk[mhz]"]
+        )
+        update_dashboard_data(
+            topology,
+            wbits,
+            abits,
+            "throughput[images/s]",
+            ret[largest_bsize]["throughput[images/s]"],
+        )
+
+    def test_upload_results_to_dashboard(self, topology, wbits, abits):
+        dashboard_data = get_dashboard_data(topology, wbits, abits)
+        if len(dashboard_data.keys()) > 0:
+            upload_to_end2end_dashboard(dashboard_data)
+        else:
+            pytest.skip("No data to upload to dashboard")
diff --git a/tests/end2end/test_end2end_cnv_w1a1.py b/tests/end2end/test_end2end_cnv_w1a1.py
deleted file mode 100644
index d7f59ef35aaf61891937dcaa105cf1392133e732..0000000000000000000000000000000000000000
--- a/tests/end2end/test_end2end_cnv_w1a1.py
+++ /dev/null
@@ -1,329 +0,0 @@
-# Copyright (c) 2020, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import os
-
-import numpy as np
-
-# as of Feb'20 there is a bug that segfaults ONNX shape inference if we
-# import pytorch before onnx, so we make sure to import onnx first
-import onnx  # NOQA
-
-import pytest
-import pkg_resources as pk
-from finn.core.modelwrapper import ModelWrapper
-from finn.custom_op.registry import getCustomOp
-from finn.core.onnx_exec import execute_onnx
-from finn.transformation.double_to_single_float import DoubleToSingleFloat
-from finn.transformation.infer_shapes import InferShapes
-from finn.transformation.move_reshape import MoveReshape
-from finn.transformation.fold_constants import FoldConstants
-from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
-from finn.transformation.streamline import Streamline
-from finn.transformation.lower_convs_to_matmul import LowerConvsToMatMul
-from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount
-import finn.transformation.streamline.absorb as absorb
-from finn.transformation.streamline.reorder import MakeMaxPoolNHWC
-import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
-from finn.transformation.fpgadataflow.create_dataflow_partition import (
-    CreateDataflowPartition,
-)
-from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
-from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker
-from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
-from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
-from finn.transformation.fpgadataflow.replace_verilog_relpaths import (
-    ReplaceVerilogRelPaths,
-)
-from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
-from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
-from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
-from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
-from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
-from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject
-from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject
-from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
-from finn.util.basic import pynq_part_map
-from finn.util.test import get_test_model_trained
-from finn.transformation.fpgadataflow.annotate_resources import AnnotateResources
-from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
-from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO
-
-build_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
-test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
-test_fpga_part = pynq_part_map[test_pynq_board]
-target_clk_ns = 5
-mem_mode = "decoupled"
-
-
-def test_end2end_cnv_w1a1_export():
-    import brevitas.onnx as bo
-
-    cnv = get_test_model_trained("CNV", 1, 1)
-    bo.export_finn_onnx(
-        cnv, (1, 3, 32, 32), build_dir + "/end2end_cnv_w1a1_export.onnx"
-    )
-
-
-def test_end2end_cnv_w1a1_import_and_tidy():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_export.onnx")
-    model = model.transform(DoubleToSingleFloat())
-    model = model.transform(InferShapes())
-    model = model.transform(FoldConstants())
-    model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(GiveReadableTensorNames())
-    model.save(build_dir + "/end2end_cnv_w1a1_tidy.onnx")
-
-
-def test_end2end_cnv_w1a1_streamline():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_tidy.onnx")
-    model = model.transform(Streamline())
-    model = model.transform(LowerConvsToMatMul())
-    model = model.transform(MakeMaxPoolNHWC())
-    model = model.transform(absorb.AbsorbTransposeIntoMultiThreshold())
-    model = model.transform(ConvertBipolarMatMulToXnorPopcount())
-    model = model.transform(Streamline())
-    model.save(build_dir + "/end2end_cnv_w1a1_streamlined.onnx")
-
-
-def test_end2end_cnv_w1a1_convert_to_hls_layers():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_streamlined.onnx")
-    model = model.transform(to_hls.InferBinaryStreamingFCLayer(mem_mode))
-    model = model.transform(to_hls.InferQuantizedStreamingFCLayer(mem_mode))
-    model = model.transform(to_hls.InferConvInpGen())
-    model = model.transform(to_hls.InferStreamingMaxPool())
-    model = model.transform(MoveReshape())
-    model.save(build_dir + "/end2end_cnv_w1a1_hls_layers.onnx")
-
-
-def test_end2end_cnv_w1a1_create_dataflow_partition():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_hls_layers.onnx")
-    parent_model = model.transform(CreateDataflowPartition())
-    parent_model.save(build_dir + "/end2end_cnv_w1a1_dataflow_parent.onnx")
-    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-    sdp_node = getCustomOp(sdp_node)
-    dataflow_model_filename = sdp_node.get_nodeattr("model")
-    dataflow_model = ModelWrapper(dataflow_model_filename)
-    dataflow_model.save(build_dir + "/end2end_cnv_w1a1_dataflow_model.onnx")
-
-
-def test_end2end_cnv_w1a1_fold_and_tlastmarker():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_dataflow_model.onnx")
-    fc_layers = model.get_nodes_by_op_type("StreamingFCLayer_Batch")
-    # each tuple is (PE, SIMD, in_fifo_depth) for a layer
-    folding = [
-        (16, 3, 128),
-        (32, 32, 128),
-        (16, 32, 128),
-        (16, 32, 128),
-        (4, 32, 81),
-        (1, 32, 2),
-        (1, 4, 2),
-        (1, 8, 128),
-        (5, 1, 3),
-    ]
-    for fcl, (pe, simd, ififodepth) in zip(fc_layers, folding):
-        fcl_inst = getCustomOp(fcl)
-        fcl_inst.set_nodeattr("PE", pe)
-        fcl_inst.set_nodeattr("SIMD", simd)
-        fcl_inst.set_nodeattr("inFIFODepth", ififodepth)
-
-    swg_layers = model.get_nodes_by_op_type("ConvolutionInputGenerator")
-    for i in range(len(swg_layers)):
-        swg_inst = getCustomOp(swg_layers[i])
-        simd = folding[i][1]
-        swg_inst.set_nodeattr("SIMD", simd)
-
-    model = model.transform(InsertDWC())
-    model = model.transform(InsertFIFO())
-    model = model.transform(InsertTLastMarker())
-    model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(AnnotateResources("estimate"))
-    model.save(build_dir + "/end2end_cnv_w1a1_folded.onnx")
-
-
-def test_end2end_cnv_w1a1_gen_hls_ip():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_folded.onnx")
-    model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))
-    model = model.transform(HLSSynthIP())
-    model = model.transform(AnnotateResources("hls"))
-    model.save(build_dir + "/end2end_cnv_w1a1_ipgen.onnx")
-
-
-def test_end2end_cnv_w1a1_ip_stitch():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_ipgen.onnx")
-    model = model.transform(ReplaceVerilogRelPaths())
-    model = model.transform(CreateStitchedIP(test_fpga_part))
-    model.save(build_dir + "/end2end_cnv_w1a1_ipstitch.onnx")
-
-
-def test_end2end_cnv_w1a1_verify_dataflow_part():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_ipstitch.onnx")
-    x = np.zeros((1, 32, 32, 3), dtype=np.float32)
-    inp_name = model.graph.input[0].name
-    out_name = model.graph.output[0].name
-    inp_dict = {inp_name: x}
-    # cppsim
-    model = model.transform(PrepareCppSim())
-    model = model.transform(CompileCppSim())
-    model = model.transform(SetExecMode("cppsim"))
-    model.save(build_dir + "/end2end_cnv_w1a1_ipgen_cppsim.onnx")
-    ret_cppsim = execute_onnx(model, inp_dict, True)
-    res_cppsim = ret_cppsim[out_name]
-    # node-by-node rtlsim
-    model = model.transform(SetExecMode("rtlsim"))
-    model = model.transform(PrepareRTLSim())
-    model.save(build_dir + "/end2end_cnv_w1a1_ipgen_nodebynode_rtlsim.onnx")
-    ret_rtlsim_nodebynode = execute_onnx(model, inp_dict, True)
-    res_rtlsim_nodebynode = ret_rtlsim_nodebynode[out_name]
-    # whole-network (ip-stitched) rtlsim
-    model.set_metadata_prop("exec_mode", "rtlsim")
-    model.save(build_dir + "/end2end_cnv_w1a1_ipstitch_whole_rtlsim.onnx")
-    # this is a particularly long-running test, set liveness thr. to unlimited
-    os.environ["LIVENESS_THRESHOLD"] = "-1"
-    ret_rtlsim_whole = execute_onnx(model, inp_dict, True)
-    res_rtlsim_whole = ret_rtlsim_whole[out_name]
-    assert np.isclose(res_cppsim, res_rtlsim_nodebynode).all()
-    assert np.isclose(res_cppsim, res_rtlsim_whole).all()
-
-
-def test_end2end_cnv_w1a1_verify_all():
-    # use the streamlined model as the "golden" model for right answers
-    golden = ModelWrapper(build_dir + "/end2end_cnv_w1a1_streamlined.onnx")
-    iname = golden.graph.input[0].name
-    oname = golden.graph.output[0].name
-    # load one of the test vectors
-    fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz")
-    input_tensor = np.load(fn)["arr_0"].astype(np.float32)
-    input_tensor = input_tensor / 255
-    assert input_tensor.shape == (1, 3, 32, 32)
-    x = input_tensor
-    # x = np.zeros(ishape, dtype=np.float32)
-    ret_golden = execute_onnx(golden, {iname: x}, True)
-    y_golden = ret_golden[oname]
-    # set up parent+child graph to test
-    # we'll use models from the previous step as the child model
-    parent_model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_dataflow_parent.onnx")
-    iname = parent_model.graph.input[0].name
-    oname = parent_model.graph.output[0].name
-    # produce results with cppsim
-    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-    sdp_node = getCustomOp(sdp_node)
-    sdp_node.set_nodeattr("model", build_dir + "/end2end_cnv_w1a1_ipgen_cppsim.onnx")
-    ret_cppsim = execute_onnx(parent_model, {iname: x}, True)
-    y_cppsim = ret_cppsim[oname]
-    # produce results with node-by-node rtlsim
-    sdp_node.set_nodeattr(
-        "model", build_dir + "/end2end_cnv_w1a1_ipgen_nodebynode_rtlsim.onnx"
-    )
-    ret_nodebynode_rtlsim = execute_onnx(parent_model, {iname: x}, True)
-    y_nodebynode_rtlsim = ret_nodebynode_rtlsim[oname]
-    # produce results with whole-network (stitched ip) rtlsim
-    sdp_node.set_nodeattr(
-        "model", build_dir + "/end2end_cnv_w1a1_ipstitch_whole_rtlsim.onnx"
-    )
-    # this is a particularly long-running test, set liveness thr. to unlimited
-    os.environ["LIVENESS_THRESHOLD"] = "-1"
-    ret_whole_rtlsim = execute_onnx(parent_model, {iname: x}, True)
-    y_whole_rtlsim = ret_whole_rtlsim[oname]
-    assert np.isclose(y_golden, y_cppsim).all()
-    assert np.isclose(y_golden, y_nodebynode_rtlsim).all()
-    assert np.isclose(y_golden, y_whole_rtlsim).all()
-    assert np.argmax(y_golden) == 3
-
-
-def test_end2end_cnv_w1a1_make_pynq_proj():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_ipstitch.onnx")
-    model = model.transform(MakePYNQProject(test_pynq_board))
-    model.save(build_dir + "/end2end_cnv_w1a1_pynq_project.onnx")
-
-
-def test_end2end_cnv_w1a1_synth_pynq_project():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_pynq_project.onnx")
-    model = model.transform(SynthPYNQProject())
-    model = model.transform(AnnotateResources("synth"))
-    model.save(build_dir + "/end2end_cnv_w1a1_synth.onnx")
-
-
-def test_end2end_cnv_w1a1_make_driver():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_synth.onnx")
-    model = model.transform(MakePYNQDriver())
-    model.save(build_dir + "/end2end_cnv_w1a1_pynq_driver.onnx")
-
-
-def test_end2end_cnv_w1a1_deploy_on_pynq():
-    model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_pynq_driver.onnx")
-    try:
-        ip = os.environ["PYNQ_IP"]  # no fault for this one; skip if not defined
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        username = os.getenv("PYNQ_USERNAME", "xilinx")
-        password = os.getenv("PYNQ_PASSWORD", "xilinx")
-        port = os.getenv("PYNQ_PORT", 22)
-        target_dir = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn")
-        model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))
-        # save the model to be able to link it to the parent
-        model.save(build_dir + "/end2end_cnv_w1a1_pynq_deploy.onnx")
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
-
-
-def test_end2end_cnv_w1a1_run_on_pynq():
-    # use the streamlined model as the "golden" model for right answers
-    golden = ModelWrapper(build_dir + "/end2end_cnv_w1a1_streamlined.onnx")
-    iname = golden.graph.input[0].name
-    oname = golden.graph.output[0].name
-    # load one of the test vectors
-    fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz")
-    input_tensor = np.load(fn)["arr_0"].astype(np.float32)
-    input_tensor = input_tensor / 255
-    assert input_tensor.shape == (1, 3, 32, 32)
-    x = input_tensor
-    # run using FINN-based execution
-    ret_golden = execute_onnx(golden, {iname: x}, True)
-    y_golden = ret_golden[oname]
-    # set up parent+child graph to test
-    # we'll use models from the previous step as the child model
-    parent_model = ModelWrapper(build_dir + "/end2end_cnv_w1a1_dataflow_parent.onnx")
-    iname = parent_model.graph.input[0].name
-    oname = parent_model.graph.output[0].name
-    try:
-        ip = os.environ["PYNQ_IP"]  # NOQA
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        # produce results with cppsim
-        sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-        sdp_node = getCustomOp(sdp_node)
-        sdp_node.set_nodeattr("model", build_dir + "/end2end_cnv_w1a1_pynq_deploy.onnx")
-        ret = execute_onnx(parent_model, {iname: x}, True)
-        y = ret[oname]
-        assert np.isclose(y, y_golden).all()
-        assert np.argmax(y) == 3
-
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
diff --git a/tests/end2end/test_end2end_tfc_w1a1_throughput_test.py b/tests/end2end/test_end2end_tfc_w1a1_throughput_test.py
deleted file mode 100644
index b5f3f4e27ff24723db69f887cb7f1cce9c4df617..0000000000000000000000000000000000000000
--- a/tests/end2end/test_end2end_tfc_w1a1_throughput_test.py
+++ /dev/null
@@ -1,307 +0,0 @@
-# Copyright (c) 2020, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import os
-from pkgutil import get_data
-
-import pytest
-
-import numpy as np
-
-# as of Feb'20 there is a bug that segfaults ONNX shape inference if we
-# import pytorch before onnx, so we make sure to import onnx first
-import onnx  # NOQA
-import onnx.numpy_helper as nph
-
-import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
-import finn.transformation.streamline.absorb as absorb
-from finn.core.modelwrapper import ModelWrapper
-from finn.core.onnx_exec import execute_onnx
-from finn.core.throughput_test import throughput_test
-from finn.custom_op.registry import getCustomOp
-from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount
-from finn.transformation.fold_constants import FoldConstants
-from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
-from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
-from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
-from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
-from finn.transformation.fpgadataflow.create_dataflow_partition import (
-    CreateDataflowPartition,
-)
-from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
-from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
-from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker
-from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO
-from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
-from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
-from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject
-from finn.transformation.fpgadataflow.replace_verilog_relpaths import (
-    ReplaceVerilogRelPaths,
-)
-from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
-from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject
-from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
-from finn.transformation.infer_datatypes import InferDataTypes
-from finn.transformation.infer_shapes import InferShapes
-from finn.transformation.streamline import Streamline
-from finn.transformation.streamline.round_thresholds import RoundAndClipThresholds
-from finn.util.basic import pynq_part_map
-from finn.util.test import get_test_model_trained
-from finn.transformation.fpgadataflow.annotate_resources import AnnotateResources
-from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
-
-build_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
-test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
-test_fpga_part = pynq_part_map[test_pynq_board]
-target_clk_ns = 10
-mem_mode = "decoupled"
-
-
-def test_end2end_tfc_w1a1_export():
-    import brevitas.onnx as bo
-
-    tfc = get_test_model_trained("TFC", 1, 1)
-    bo.export_finn_onnx(
-        tfc, (1, 1, 28, 28), build_dir + "/end2end_tfc_w1a1_export.onnx"
-    )
-
-
-def test_end2end_tfc_w1a1_import_and_tidy():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_export.onnx")
-    model = model.transform(InferShapes())
-    model = model.transform(FoldConstants())
-    model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(GiveReadableTensorNames())
-    model = model.transform(InferDataTypes())
-    model.save(build_dir + "/end2end_tfc_w1a1_tidy.onnx")
-
-
-def test_end2end_tfc_w1a1_streamline():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_tidy.onnx")
-    model = model.transform(Streamline())
-    model.save(build_dir + "/end2end_tfc_w1a1_streamlined.onnx")
-
-
-def test_end2end_tfc_w1a1_convert_to_hls_layers():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_streamlined.onnx")
-    model = model.transform(ConvertBipolarMatMulToXnorPopcount())
-    model = model.transform(absorb.AbsorbAddIntoMultiThreshold())
-    model = model.transform(absorb.AbsorbMulIntoMultiThreshold())
-    model = model.transform(RoundAndClipThresholds())
-    model = model.transform(to_hls.InferBinaryStreamingFCLayer(mem_mode))
-    model.save(build_dir + "/end2end_tfc_w1a1_hls_layers.onnx")
-
-
-def test_end2end_tfc_w1a1_create_dataflow_partition():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_hls_layers.onnx")
-    parent_model = model.transform(CreateDataflowPartition())
-    parent_model.save(build_dir + "/end2end_tfc_w1a1_dataflow_parent.onnx")
-    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-    sdp_node = getCustomOp(sdp_node)
-    dataflow_model_filename = sdp_node.get_nodeattr("model")
-    dataflow_model = ModelWrapper(dataflow_model_filename)
-    dataflow_model.save(build_dir + "/end2end_tfc_w1a1_dataflow_model.onnx")
-
-
-def test_end2end_tfc_w1a1_fold_and_tlastmarker():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_dataflow_model.onnx")
-    fc_layers = model.get_nodes_by_op_type("StreamingFCLayer_Batch")
-    # (PE, SIMD, in_fifo_depth, out_fifo_depth, ramstyle) for each layer
-    config = [
-        (16, 49, 16, 64, "block"),
-        (8, 8, 64, 64, "auto"),
-        (8, 8, 64, 64, "auto"),
-        (10, 8, 64, 10, "distributed"),
-    ]
-    for fcl, (pe, simd, ififo, ofifo, ramstyle) in zip(fc_layers, config):
-        fcl_inst = getCustomOp(fcl)
-        fcl_inst.set_nodeattr("PE", pe)
-        fcl_inst.set_nodeattr("SIMD", simd)
-        fcl_inst.set_nodeattr("inFIFODepth", ififo)
-        fcl_inst.set_nodeattr("outFIFODepth", ofifo)
-        fcl_inst.set_nodeattr("ram_style", ramstyle)
-    model = model.transform(InsertDWC())
-    model = model.transform(InsertFIFO())
-    model = model.transform(InsertTLastMarker())
-    model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(AnnotateResources("estimate"))
-    model.save(build_dir + "/end2end_tfc_w1a1_folded.onnx")
-
-
-def test_end2end_tfc_w1a1_gen_hls_ip():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_folded.onnx")
-    model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))
-    model = model.transform(HLSSynthIP())
-    model = model.transform(AnnotateResources("hls"))
-    model.save(build_dir + "/end2end_tfc_w1a1_ipgen.onnx")
-
-
-def test_end2end_tfc_w1a1_ip_stitch():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_ipgen.onnx")
-    model = model.transform(ReplaceVerilogRelPaths())
-    model = model.transform(CreateStitchedIP(test_fpga_part))
-    model.save(build_dir + "/end2end_tfc_w1a1_ipstitch.onnx")
-
-
-def test_end2end_tfc_w1a1_verify_dataflow_part():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_ipstitch.onnx")
-    x = np.zeros((1, 784), dtype=np.float32)
-    inp_name = model.graph.input[0].name
-    out_name = model.graph.output[0].name
-    inp_dict = {inp_name: x}
-    # cppsim
-    model = model.transform(PrepareCppSim())
-    model = model.transform(CompileCppSim())
-    model = model.transform(SetExecMode("cppsim"))
-    model.save(build_dir + "/end2end_tfc_w1a1_ipstitch_cppsim.onnx")
-    ret_cppsim = execute_onnx(model, inp_dict, True)
-    res_cppsim = ret_cppsim[out_name]
-    # node-by-node rtlsim
-    model = model.transform(SetExecMode("rtlsim"))
-    model = model.transform(PrepareRTLSim())
-    model.save(build_dir + "/end2end_tfc_w1a1_ipstitch_nodebynode_rtlsim.onnx")
-    ret_rtlsim_nodebynode = execute_onnx(model, inp_dict, True)
-    res_rtlsim_nodebynode = ret_rtlsim_nodebynode[out_name]
-    # whole-network (ip-stitched) rtlsim
-    model.set_metadata_prop("exec_mode", "rtlsim")
-    model.save(build_dir + "/end2end_tfc_w1a1_ipstitch_whole_rtlsim.onnx")
-    ret_rtlsim_whole = execute_onnx(model, inp_dict, True)
-    res_rtlsim_whole = ret_rtlsim_whole[out_name]
-    assert np.isclose(res_cppsim, res_rtlsim_nodebynode).all()
-    assert np.isclose(res_cppsim, res_rtlsim_whole).all()
-
-
-def test_end2end_tfc_w1a1_verify_all():
-    # use the streamlined model as the "golden" model for right answers
-    golden = ModelWrapper(build_dir + "/end2end_tfc_w1a1_streamlined.onnx")
-    iname = golden.graph.input[0].name
-    oname = golden.graph.output[0].name
-    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
-    input_tensor = onnx.load_tensor_from_string(raw_i)
-    x = nph.to_array(input_tensor)
-    # x = np.zeros(ishape, dtype=np.float32)
-    ret_golden = execute_onnx(golden, {iname: x}, True)
-    y_golden = ret_golden[oname]
-    # set up parent+child graph to test
-    # we'll use models from the previous step as the child model
-    parent_model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_dataflow_parent.onnx")
-    iname = parent_model.graph.input[0].name
-    oname = parent_model.graph.output[0].name
-    # produce results with cppsim
-    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-    sdp_node = getCustomOp(sdp_node)
-    sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w1a1_ipstitch_cppsim.onnx")
-    ret_cppsim = execute_onnx(parent_model, {iname: x}, True)
-    y_cppsim = ret_cppsim[oname]
-    # produce results with node-by-node rtlsim
-    sdp_node.set_nodeattr(
-        "model", build_dir + "/end2end_tfc_w1a1_ipstitch_nodebynode_rtlsim.onnx"
-    )
-    ret_nodebynode_rtlsim = execute_onnx(parent_model, {iname: x}, True)
-    y_nodebynode_rtlsim = ret_nodebynode_rtlsim[oname]
-    # produce results with whole-network (stitched ip) rtlsim
-    sdp_node.set_nodeattr(
-        "model", build_dir + "/end2end_tfc_w1a1_ipstitch_whole_rtlsim.onnx"
-    )
-    ret_whole_rtlsim = execute_onnx(parent_model, {iname: x}, True)
-    y_whole_rtlsim = ret_whole_rtlsim[oname]
-    assert np.isclose(y_golden, y_cppsim).all()
-    assert np.isclose(y_golden, y_nodebynode_rtlsim).all()
-    assert np.isclose(y_golden, y_whole_rtlsim).all()
-
-
-def test_end2end_tfc_w1a1_make_pynq_proj():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_ipstitch.onnx")
-    model = model.transform(MakePYNQProject(test_pynq_board))
-    model.save(build_dir + "/end2end_tfc_w1a1_pynq_project.onnx")
-
-
-def test_end2end_tfc_w1a1_synth_pynq_project():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_pynq_project.onnx")
-    model = model.transform(SynthPYNQProject())
-    model = model.transform(AnnotateResources("synth"))
-    model.save(build_dir + "/end2end_tfc_w1a1_synth.onnx")
-
-
-def test_end2end_tfc_w1a1_make_driver():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_synth.onnx")
-    model = model.transform(MakePYNQDriver())
-    model.save(build_dir + "/end2end_tfc_w1a1_pynq_driver.onnx")
-
-
-def test_end2end_tfc_w1a1_deploy_on_pynq():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_pynq_driver.onnx")
-    try:
-        ip = os.environ["PYNQ_IP"]  # no fault for this one; skip if not defined
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        username = os.getenv("PYNQ_USERNAME", "xilinx")
-        password = os.getenv("PYNQ_PASSWORD", "xilinx")
-        port = os.getenv("PYNQ_PORT", 22)
-        target_dir = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn")
-        model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))
-        # save the model to be able to link it to the parent
-        model.save(build_dir + "/end2end_tfc_w1a1_pynq_deploy.onnx")
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
-
-
-def test_end2end_tfc_w1a1_run_on_pynq():
-    # use the streamlined model as the "golden" model for right answers
-    golden = ModelWrapper(build_dir + "/end2end_tfc_w1a1_streamlined.onnx")
-    iname = golden.graph.input[0].name
-    oname = golden.graph.output[0].name
-    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
-    input_tensor = onnx.load_tensor_from_string(raw_i)
-    x = nph.to_array(input_tensor)
-    # x = np.zeros(ishape, dtype=np.float32)
-    # run using FINN-based execution
-    ret_golden = execute_onnx(golden, {iname: x}, True)
-    y_golden = ret_golden[oname]
-    # set up parent+child graph to test
-    # we'll use models from the previous step as the child model
-    parent_model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_dataflow_parent.onnx")
-    iname = parent_model.graph.input[0].name
-    oname = parent_model.graph.output[0].name
-    try:
-        ip = os.environ["PYNQ_IP"]  # NOQA
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        # produce results with cppsim
-        sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-        sdp_node = getCustomOp(sdp_node)
-        sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w1a1_pynq_deploy.onnx")
-        ret = execute_onnx(parent_model, {iname: x}, True)
-        y = ret[oname]
-        assert np.isclose(y, y_golden).all()
-        child_model = ModelWrapper(sdp_node.get_nodeattr("model"))
-        res = throughput_test(child_model)
-        assert res is not None
-
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
diff --git a/tests/end2end/test_end2end_tfc_w1a2.py b/tests/end2end/test_end2end_tfc_w1a2.py
deleted file mode 100644
index ecc0d48a6af37bc2bdd48f9306976aa8582ca1b0..0000000000000000000000000000000000000000
--- a/tests/end2end/test_end2end_tfc_w1a2.py
+++ /dev/null
@@ -1,296 +0,0 @@
-# Copyright (c) 2020, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import os
-from pkgutil import get_data
-
-import pytest
-
-import numpy as np
-
-# as of Feb'20 there is a bug that segfaults ONNX shape inference if we
-# import pytorch before onnx, so we make sure to import onnx first
-import onnx  # NOQA
-import onnx.numpy_helper as nph
-
-import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
-from finn.core.modelwrapper import ModelWrapper
-from finn.core.onnx_exec import execute_onnx
-from finn.custom_op.registry import getCustomOp
-from finn.transformation.fold_constants import FoldConstants
-from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
-from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
-from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
-from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
-from finn.transformation.fpgadataflow.create_dataflow_partition import (
-    CreateDataflowPartition,
-)
-from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
-from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
-from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO
-from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker
-from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
-from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
-from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject
-from finn.transformation.fpgadataflow.replace_verilog_relpaths import (
-    ReplaceVerilogRelPaths,
-)
-from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
-from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject
-from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
-from finn.transformation.infer_datatypes import InferDataTypes
-from finn.transformation.infer_shapes import InferShapes
-from finn.transformation.streamline import Streamline
-from finn.util.basic import pynq_part_map
-from finn.util.test import get_test_model_trained
-from finn.transformation.fpgadataflow.annotate_resources import AnnotateResources
-from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
-
-build_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
-test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
-test_fpga_part = pynq_part_map[test_pynq_board]
-target_clk_ns = 10
-mem_mode = "decoupled"
-
-
-def test_end2end_tfc_w1a2_export():
-    import brevitas.onnx as bo
-
-    tfc = get_test_model_trained("TFC", 1, 2)
-    bo.export_finn_onnx(
-        tfc, (1, 1, 28, 28), build_dir + "/end2end_tfc_w1a2_export.onnx"
-    )
-
-
-def test_end2end_tfc_w1a2_import_and_tidy():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_export.onnx")
-    model = model.transform(InferShapes())
-    model = model.transform(FoldConstants())
-    model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(GiveReadableTensorNames())
-    model = model.transform(InferDataTypes())
-    model.save(build_dir + "/end2end_tfc_w1a2_tidy.onnx")
-
-
-def test_end2end_tfc_w1a2_streamline():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_tidy.onnx")
-    model = model.transform(Streamline())
-    model.save(build_dir + "/end2end_tfc_w1a2_streamlined.onnx")
-
-
-def test_end2end_tfc_w1a2_convert_to_hls_layers():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_streamlined.onnx")
-    model = model.transform(to_hls.InferQuantizedStreamingFCLayer(mem_mode))
-    model.save(build_dir + "/end2end_tfc_w1a2_hls_layers.onnx")
-
-
-def test_end2end_tfc_w1a2_create_dataflow_partition():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_hls_layers.onnx")
-    parent_model = model.transform(CreateDataflowPartition())
-    parent_model.save(build_dir + "/end2end_tfc_w1a2_dataflow_parent.onnx")
-    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-    sdp_node = getCustomOp(sdp_node)
-    dataflow_model_filename = sdp_node.get_nodeattr("model")
-    dataflow_model = ModelWrapper(dataflow_model_filename)
-    dataflow_model.save(build_dir + "/end2end_tfc_w1a2_dataflow_model.onnx")
-
-
-def test_end2end_tfc_w1a2_fold_and_tlastmarker():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_dataflow_model.onnx")
-    fc_layers = model.get_nodes_by_op_type("StreamingFCLayer_Batch")
-    # (PE, SIMD, in_fifo_depth, out_fifo_depth, ramstyle) for each layer
-    config = [
-        (16, 49, 16, 64, "block"),
-        (8, 8, 64, 64, "auto"),
-        (8, 8, 64, 64, "auto"),
-        (10, 8, 64, 10, "distributed"),
-    ]
-    for fcl, (pe, simd, ififo, ofifo, ramstyle) in zip(fc_layers, config):
-        fcl_inst = getCustomOp(fcl)
-        fcl_inst.set_nodeattr("PE", pe)
-        fcl_inst.set_nodeattr("SIMD", simd)
-        fcl_inst.set_nodeattr("inFIFODepth", ififo)
-        fcl_inst.set_nodeattr("outFIFODepth", ofifo)
-        fcl_inst.set_nodeattr("ram_style", ramstyle)
-    model = model.transform(InsertDWC())
-    model = model.transform(InsertFIFO())
-    model = model.transform(InsertTLastMarker())
-    model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(AnnotateResources("estimate"))
-    model.save(build_dir + "/end2end_tfc_w1a2_folded.onnx")
-
-
-def test_end2end_tfc_w1a2_gen_hls_ip():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_folded.onnx")
-    model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))
-    model = model.transform(HLSSynthIP())
-    model = model.transform(AnnotateResources("hls"))
-    model.save(build_dir + "/end2end_tfc_w1a2_ipgen.onnx")
-
-
-def test_end2end_tfc_w1a2_ip_stitch():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_ipgen.onnx")
-    model = model.transform(ReplaceVerilogRelPaths())
-    model = model.transform(CreateStitchedIP(test_fpga_part))
-    model.save(build_dir + "/end2end_tfc_w1a2_ipstitch.onnx")
-
-
-def test_end2end_tfc_w1a2_verify_dataflow_part():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_ipstitch.onnx")
-    x = np.zeros((1, 784), dtype=np.float32)
-    inp_name = model.graph.input[0].name
-    out_name = model.graph.output[0].name
-    inp_dict = {inp_name: x}
-    # cppsim
-    model = model.transform(PrepareCppSim())
-    model = model.transform(CompileCppSim())
-    model = model.transform(SetExecMode("cppsim"))
-    model.save(build_dir + "/end2end_tfc_w1a2_ipstitch_cppsim.onnx")
-    ret_cppsim = execute_onnx(model, inp_dict, True)
-    res_cppsim = ret_cppsim[out_name]
-    # node-by-node rtlsim
-    model = model.transform(SetExecMode("rtlsim"))
-    model = model.transform(PrepareRTLSim())
-    model.save(build_dir + "/end2end_tfc_w1a2_ipstitch_nodebynode_rtlsim.onnx")
-    ret_rtlsim_nodebynode = execute_onnx(model, inp_dict, True)
-    res_rtlsim_nodebynode = ret_rtlsim_nodebynode[out_name]
-    # whole-network (ip-stitched) rtlsim
-    model.set_metadata_prop("exec_mode", "rtlsim")
-    model.save(build_dir + "/end2end_tfc_w1a2_ipstitch_whole_rtlsim.onnx")
-    ret_rtlsim_whole = execute_onnx(model, inp_dict, True)
-    res_rtlsim_whole = ret_rtlsim_whole[out_name]
-    assert np.isclose(res_cppsim, res_rtlsim_nodebynode).all()
-    assert np.isclose(res_cppsim, res_rtlsim_whole).all()
-
-
-def test_end2end_tfc_w1a2_verify_all():
-    # use the streamlined model as the "golden" model for right answers
-    golden = ModelWrapper(build_dir + "/end2end_tfc_w1a2_streamlined.onnx")
-    iname = golden.graph.input[0].name
-    oname = golden.graph.output[0].name
-    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
-    input_tensor = onnx.load_tensor_from_string(raw_i)
-    x = nph.to_array(input_tensor)
-    # x = np.zeros(ishape, dtype=np.float32)
-    ret_golden = execute_onnx(golden, {iname: x}, True)
-    y_golden = ret_golden[oname]
-    # set up parent+child graph to test
-    # we'll use models from the previous step as the child model
-    parent_model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_dataflow_parent.onnx")
-    iname = parent_model.graph.input[0].name
-    oname = parent_model.graph.output[0].name
-    # produce results with cppsim
-    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-    sdp_node = getCustomOp(sdp_node)
-    sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w1a2_ipstitch_cppsim.onnx")
-    ret_cppsim = execute_onnx(parent_model, {iname: x}, True)
-    y_cppsim = ret_cppsim[oname]
-    # produce results with node-by-node rtlsim
-    sdp_node.set_nodeattr(
-        "model", build_dir + "/end2end_tfc_w1a2_ipstitch_nodebynode_rtlsim.onnx"
-    )
-    ret_nodebynode_rtlsim = execute_onnx(parent_model, {iname: x}, True)
-    y_nodebynode_rtlsim = ret_nodebynode_rtlsim[oname]
-    # produce results with whole-network (stitched ip) rtlsim
-    sdp_node.set_nodeattr(
-        "model", build_dir + "/end2end_tfc_w1a2_ipstitch_whole_rtlsim.onnx"
-    )
-    ret_whole_rtlsim = execute_onnx(parent_model, {iname: x}, True)
-    y_whole_rtlsim = ret_whole_rtlsim[oname]
-    assert np.isclose(y_golden, y_cppsim).all()
-    assert np.isclose(y_golden, y_nodebynode_rtlsim).all()
-    assert np.isclose(y_golden, y_whole_rtlsim).all()
-
-
-def test_end2end_tfc_w1a2_make_pynq_proj():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_ipstitch.onnx")
-    model = model.transform(MakePYNQProject(test_pynq_board))
-    model.save(build_dir + "/end2end_tfc_w1a2_pynq_project.onnx")
-
-
-def test_end2end_tfc_w1a2_synth_pynq_project():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_pynq_project.onnx")
-    model = model.transform(SynthPYNQProject())
-    model = model.transform(AnnotateResources("synth"))
-    model.save(build_dir + "/end2end_tfc_w1a2_synth.onnx")
-
-
-def test_end2end_tfc_w1a2_make_driver():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_synth.onnx")
-    model = model.transform(MakePYNQDriver())
-    model.save(build_dir + "/end2end_tfc_w1a2_pynq_driver.onnx")
-
-
-def test_end2end_tfc_w1a2_deploy_on_pynq():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_pynq_driver.onnx")
-    try:
-        ip = os.environ["PYNQ_IP"]  # no fault for this one; skip if not defined
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        username = os.getenv("PYNQ_USERNAME", "xilinx")
-        password = os.getenv("PYNQ_PASSWORD", "xilinx")
-        port = os.getenv("PYNQ_PORT", 22)
-        target_dir = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn")
-        model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))
-        # save the model to be able to link it to the parent
-        model.save(build_dir + "/end2end_tfc_w1a2_pynq_deploy.onnx")
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
-
-
-def test_end2end_tfc_w1a2_run_on_pynq():
-    # use the streamlined model as the "golden" model for right answers
-    golden = ModelWrapper(build_dir + "/end2end_tfc_w1a2_streamlined.onnx")
-    iname = golden.graph.input[0].name
-    oname = golden.graph.output[0].name
-    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
-    input_tensor = onnx.load_tensor_from_string(raw_i)
-    x = nph.to_array(input_tensor)
-    # x = np.zeros(ishape, dtype=np.float32)
-    # run using FINN-based execution
-    ret_golden = execute_onnx(golden, {iname: x}, True)
-    y_golden = ret_golden[oname]
-    # set up parent+child graph to test
-    # we'll use models from the previous step as the child model
-    parent_model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_dataflow_parent.onnx")
-    iname = parent_model.graph.input[0].name
-    oname = parent_model.graph.output[0].name
-    try:
-        ip = os.environ["PYNQ_IP"]  # NOQA
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        # produce results with cppsim
-        sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-        sdp_node = getCustomOp(sdp_node)
-        sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w1a2_pynq_deploy.onnx")
-        ret = execute_onnx(parent_model, {iname: x}, True)
-        y = ret[oname]
-        assert np.isclose(y, y_golden).all()
-
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
diff --git a/tests/end2end/test_end2end_tfc_w2a2.py b/tests/end2end/test_end2end_tfc_w2a2.py
deleted file mode 100644
index 8c13352d9e9d146d58d76b1cf1e17878f27513f5..0000000000000000000000000000000000000000
--- a/tests/end2end/test_end2end_tfc_w2a2.py
+++ /dev/null
@@ -1,296 +0,0 @@
-# Copyright (c) 2020, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import os
-from pkgutil import get_data
-
-import pytest
-
-import numpy as np
-
-# as of Feb'20 there is a bug that segfaults ONNX shape inference if we
-# import pytorch before onnx, so we make sure to import onnx first
-import onnx  # NOQA
-import onnx.numpy_helper as nph
-
-import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
-from finn.core.modelwrapper import ModelWrapper
-from finn.core.onnx_exec import execute_onnx
-from finn.custom_op.registry import getCustomOp
-from finn.transformation.fold_constants import FoldConstants
-from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
-from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
-from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
-from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
-from finn.transformation.fpgadataflow.create_dataflow_partition import (
-    CreateDataflowPartition,
-)
-from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
-from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker
-from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
-from finn.transformation.fpgadataflow.insert_fifo import InsertFIFO
-from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
-from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
-from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject
-from finn.transformation.fpgadataflow.replace_verilog_relpaths import (
-    ReplaceVerilogRelPaths,
-)
-from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
-from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject
-from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
-from finn.transformation.infer_datatypes import InferDataTypes
-from finn.transformation.infer_shapes import InferShapes
-from finn.transformation.streamline import Streamline
-from finn.util.basic import pynq_part_map
-from finn.util.test import get_test_model_trained
-from finn.transformation.fpgadataflow.annotate_resources import AnnotateResources
-from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
-
-build_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
-test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
-test_fpga_part = pynq_part_map[test_pynq_board]
-target_clk_ns = 10
-mem_mode = "decoupled"
-
-
-def test_end2end_tfc_w2a2_export():
-    import brevitas.onnx as bo
-
-    tfc = get_test_model_trained("TFC", 2, 2)
-    bo.export_finn_onnx(
-        tfc, (1, 1, 28, 28), build_dir + "/end2end_tfc_w2a2_export.onnx"
-    )
-
-
-def test_end2end_tfc_w2a2_import_and_tidy():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_export.onnx")
-    model = model.transform(InferShapes())
-    model = model.transform(FoldConstants())
-    model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(GiveReadableTensorNames())
-    model = model.transform(InferDataTypes())
-    model.save(build_dir + "/end2end_tfc_w2a2_tidy.onnx")
-
-
-def test_end2end_tfc_w2a2_streamline():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_tidy.onnx")
-    model = model.transform(Streamline())
-    model.save(build_dir + "/end2end_tfc_w2a2_streamlined.onnx")
-
-
-def test_end2end_tfc_w2a2_convert_to_hls_layers():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_streamlined.onnx")
-    model = model.transform(to_hls.InferQuantizedStreamingFCLayer(mem_mode))
-    model.save(build_dir + "/end2end_tfc_w2a2_hls_layers.onnx")
-
-
-def test_end2end_tfc_w2a2_create_dataflow_partition():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_hls_layers.onnx")
-    parent_model = model.transform(CreateDataflowPartition())
-    parent_model.save(build_dir + "/end2end_tfc_w2a2_dataflow_parent.onnx")
-    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-    sdp_node = getCustomOp(sdp_node)
-    dataflow_model_filename = sdp_node.get_nodeattr("model")
-    dataflow_model = ModelWrapper(dataflow_model_filename)
-    dataflow_model.save(build_dir + "/end2end_tfc_w2a2_dataflow_model.onnx")
-
-
-def test_end2end_tfc_w2a2_fold_and_tlastmarker():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_dataflow_model.onnx")
-    fc_layers = model.get_nodes_by_op_type("StreamingFCLayer_Batch")
-    # (PE, SIMD, in_fifo_depth, out_fifo_depth, ramstyle) for each layer
-    config = [
-        (16, 49, 16, 64, "block"),
-        (8, 8, 64, 64, "auto"),
-        (8, 8, 64, 64, "auto"),
-        (10, 8, 64, 10, "distributed"),
-    ]
-    for fcl, (pe, simd, ififo, ofifo, ramstyle) in zip(fc_layers, config):
-        fcl_inst = getCustomOp(fcl)
-        fcl_inst.set_nodeattr("PE", pe)
-        fcl_inst.set_nodeattr("SIMD", simd)
-        fcl_inst.set_nodeattr("inFIFODepth", ififo)
-        fcl_inst.set_nodeattr("outFIFODepth", ofifo)
-        fcl_inst.set_nodeattr("ram_style", ramstyle)
-    model = model.transform(InsertDWC())
-    model = model.transform(InsertFIFO())
-    model = model.transform(InsertTLastMarker())
-    model = model.transform(GiveUniqueNodeNames())
-    model = model.transform(AnnotateResources("estimate"))
-    model.save(build_dir + "/end2end_tfc_w2a2_folded.onnx")
-
-
-def test_end2end_tfc_w2a2_gen_hls_ip():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_folded.onnx")
-    model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))
-    model = model.transform(HLSSynthIP())
-    model = model.transform(AnnotateResources("hls"))
-    model.save(build_dir + "/end2end_tfc_w2a2_ipgen.onnx")
-
-
-def test_end2end_tfc_w2a2_ip_stitch():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_ipgen.onnx")
-    model = model.transform(ReplaceVerilogRelPaths())
-    model = model.transform(CreateStitchedIP(test_fpga_part))
-    model.save(build_dir + "/end2end_tfc_w2a2_ipstitch.onnx")
-
-
-def test_end2end_tfc_w2a2_verify_dataflow_part():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_ipstitch.onnx")
-    x = np.zeros((1, 784), dtype=np.float32)
-    inp_name = model.graph.input[0].name
-    out_name = model.graph.output[0].name
-    inp_dict = {inp_name: x}
-    # cppsim
-    model = model.transform(PrepareCppSim())
-    model = model.transform(CompileCppSim())
-    model = model.transform(SetExecMode("cppsim"))
-    model.save(build_dir + "/end2end_tfc_w2a2_ipstitch_cppsim.onnx")
-    ret_cppsim = execute_onnx(model, inp_dict, True)
-    res_cppsim = ret_cppsim[out_name]
-    # node-by-node rtlsim
-    model = model.transform(SetExecMode("rtlsim"))
-    model = model.transform(PrepareRTLSim())
-    model.save(build_dir + "/end2end_tfc_w2a2_ipstitch_nodebynode_rtlsim.onnx")
-    ret_rtlsim_nodebynode = execute_onnx(model, inp_dict, True)
-    res_rtlsim_nodebynode = ret_rtlsim_nodebynode[out_name]
-    # whole-network (ip-stitched) rtlsim
-    model.set_metadata_prop("exec_mode", "rtlsim")
-    model.save(build_dir + "/end2end_tfc_w2a2_ipstitch_whole_rtlsim.onnx")
-    ret_rtlsim_whole = execute_onnx(model, inp_dict, True)
-    res_rtlsim_whole = ret_rtlsim_whole[out_name]
-    assert np.isclose(res_cppsim, res_rtlsim_nodebynode).all()
-    assert np.isclose(res_cppsim, res_rtlsim_whole).all()
-
-
-def test_end2end_tfc_w2a2_verify_all():
-    # use the streamlined model as the "golden" model for right answers
-    golden = ModelWrapper(build_dir + "/end2end_tfc_w2a2_streamlined.onnx")
-    iname = golden.graph.input[0].name
-    oname = golden.graph.output[0].name
-    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
-    input_tensor = onnx.load_tensor_from_string(raw_i)
-    x = nph.to_array(input_tensor)
-    # x = np.zeros(ishape, dtype=np.float32)
-    ret_golden = execute_onnx(golden, {iname: x}, True)
-    y_golden = ret_golden[oname]
-    # set up parent+child graph to test
-    # we'll use models from the previous step as the child model
-    parent_model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_dataflow_parent.onnx")
-    iname = parent_model.graph.input[0].name
-    oname = parent_model.graph.output[0].name
-    # produce results with cppsim
-    sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-    sdp_node = getCustomOp(sdp_node)
-    sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w2a2_ipstitch_cppsim.onnx")
-    ret_cppsim = execute_onnx(parent_model, {iname: x}, True)
-    y_cppsim = ret_cppsim[oname]
-    # produce results with node-by-node rtlsim
-    sdp_node.set_nodeattr(
-        "model", build_dir + "/end2end_tfc_w2a2_ipstitch_nodebynode_rtlsim.onnx"
-    )
-    ret_nodebynode_rtlsim = execute_onnx(parent_model, {iname: x}, True)
-    y_nodebynode_rtlsim = ret_nodebynode_rtlsim[oname]
-    # produce results with whole-network (stitched ip) rtlsim
-    sdp_node.set_nodeattr(
-        "model", build_dir + "/end2end_tfc_w2a2_ipstitch_whole_rtlsim.onnx"
-    )
-    ret_whole_rtlsim = execute_onnx(parent_model, {iname: x}, True)
-    y_whole_rtlsim = ret_whole_rtlsim[oname]
-    assert np.isclose(y_golden, y_cppsim).all()
-    assert np.isclose(y_golden, y_nodebynode_rtlsim).all()
-    assert np.isclose(y_golden, y_whole_rtlsim).all()
-
-
-def test_end2end_tfc_w2a2_make_pynq_proj():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_ipstitch.onnx")
-    model = model.transform(MakePYNQProject(test_pynq_board))
-    model.save(build_dir + "/end2end_tfc_w2a2_pynq_project.onnx")
-
-
-def test_end2end_tfc_w2a2_synth_pynq_project():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_pynq_project.onnx")
-    model = model.transform(SynthPYNQProject())
-    model = model.transform(AnnotateResources("synth"))
-    model.save(build_dir + "/end2end_tfc_w2a2_synth.onnx")
-
-
-def test_end2end_tfc_w2a2_make_driver():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_synth.onnx")
-    model = model.transform(MakePYNQDriver())
-    model.save(build_dir + "/end2end_tfc_w2a2_pynq_driver.onnx")
-
-
-def test_end2end_tfc_w2a2_deploy_on_pynq():
-    model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_pynq_driver.onnx")
-    try:
-        ip = os.environ["PYNQ_IP"]  # no fault for this one; skip if not defined
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        username = os.getenv("PYNQ_USERNAME", "xilinx")
-        password = os.getenv("PYNQ_PASSWORD", "xilinx")
-        port = os.getenv("PYNQ_PORT", 22)
-        target_dir = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn")
-        model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))
-        # save the model to be able to link it to the parent
-        model.save(build_dir + "/end2end_tfc_w2a2_pynq_deploy.onnx")
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
-
-
-def test_end2end_tfc_w2a2_run_on_pynq():
-    # use the streamlined model as the "golden" model for right answers
-    golden = ModelWrapper(build_dir + "/end2end_tfc_w2a2_streamlined.onnx")
-    iname = golden.graph.input[0].name
-    oname = golden.graph.output[0].name
-    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
-    input_tensor = onnx.load_tensor_from_string(raw_i)
-    x = nph.to_array(input_tensor)
-    # x = np.zeros(ishape, dtype=np.float32)
-    # run using FINN-based execution
-    ret_golden = execute_onnx(golden, {iname: x}, True)
-    y_golden = ret_golden[oname]
-    # set up parent+child graph to test
-    # we'll use models from the previous step as the child model
-    parent_model = ModelWrapper(build_dir + "/end2end_tfc_w2a2_dataflow_parent.onnx")
-    iname = parent_model.graph.input[0].name
-    oname = parent_model.graph.output[0].name
-    try:
-        ip = os.environ["PYNQ_IP"]  # NOQA
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        # produce results with cppsim
-        sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-        sdp_node = getCustomOp(sdp_node)
-        sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w2a2_pynq_deploy.onnx")
-        ret = execute_onnx(parent_model, {iname: x}, True)
-        y = ret[oname]
-        assert np.isclose(y, y_golden).all()
-
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
diff --git a/tests/fpgadataflow/test_code_gen_trafo.py b/tests/fpgadataflow/test_code_gen_trafo.py
index 1228a9c79608a1c7eb44900ddb7df54ed900a3c2..24933759830535dfcec768d47a6020b4f3e2de35 100644
--- a/tests/fpgadataflow/test_code_gen_trafo.py
+++ b/tests/fpgadataflow/test_code_gen_trafo.py
@@ -29,13 +29,14 @@
 import os
 
 from onnx import TensorProto, helper
-
+import pytest
 import finn.util.basic as util
 from finn.core.datatype import DataType
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
 
 
+@pytest.mark.vivado
 def test_code_gen_trafo():
     idt = wdt = odt = DataType.BIPOLAR
     mw = 8
diff --git a/tests/fpgadataflow/test_compilation_trafo.py b/tests/fpgadataflow/test_compilation_trafo.py
index 35eed02f4e71a96f9f4e8957c372f93e6cd7927c..65894e02e490f6931e5b03a9aa67b8f22e32583a 100644
--- a/tests/fpgadataflow/test_compilation_trafo.py
+++ b/tests/fpgadataflow/test_compilation_trafo.py
@@ -30,6 +30,7 @@ import os
 
 from onnx import TensorProto, helper
 
+import pytest
 import finn.util.basic as util
 from finn.core.datatype import DataType
 from finn.core.modelwrapper import ModelWrapper
@@ -37,6 +38,7 @@ from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
 from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
 
 
+@pytest.mark.vivado
 def test_compilation_trafo():
     idt = wdt = odt = DataType.BIPOLAR
     mw = 8
diff --git a/tests/fpgadataflow/test_convert_to_hls_channelwise_layer.py b/tests/fpgadataflow/test_convert_to_hls_channelwise_layer.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ff2bedb96613b94310fc743d39fc2f4d9618677
--- /dev/null
+++ b/tests/fpgadataflow/test_convert_to_hls_channelwise_layer.py
@@ -0,0 +1,111 @@
+import pytest
+
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.util.basic import gen_finn_dt_tensor
+from finn.transformation.infer_shapes import InferShapes
+import numpy as np
+
+
+def prepare_inputs(input_tensor):
+    return {"inp": input_tensor}
+
+
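+# helper: despite the "maxpool" in its name, this builds a graph containing a
+# single channelwise Add/Mul node with a parameter tensor "p0" -- the pattern
+# that InferChannelwiseLinearLayer in the test below is expected to convert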
+def make_single_maxpool_modelwrapper(onnx_op_name, ishape, idt, pdt, pshape):
+
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, ishape)
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, ishape)
+    p0 = helper.make_tensor_value_info("p0", TensorProto.FLOAT, pshape)
+
+    model = helper.make_model(
+        helper.make_graph(
+            name="test",
+            inputs=[inp],
+            outputs=[outp],
+            value_info=[p0],
+            nodes=[helper.make_node(onnx_op_name, ["inp", "p0"], ["outp"])],
+        )
+    )
+
+    model = ModelWrapper(model)
+    model.set_initializer("p0", gen_finn_dt_tensor(pdt, pshape))
+    model.set_tensor_datatype("inp", idt)
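+    # note: make_deepcopy=False presumably lets these transforms modify the
+    # model in place, which is why their return values are not reassigned here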
+    model.transform(InferDataLayouts(), make_deepcopy=False)
+    model.transform(InferShapes(), make_deepcopy=False)
+    return model
+
+
+# parameter datatype
+@pytest.mark.parametrize("pdt", [DataType.BIPOLAR, DataType.UINT4, DataType.INT2])
+# input datatype
+@pytest.mark.parametrize("idt", [DataType.INT32, DataType.UINT4, DataType.INT4])
+# function
+@pytest.mark.parametrize("onnx_op_name", ["Add", "Mul"])
+# vector parameter or scalar parameter (broadcast)
+@pytest.mark.parametrize("scalar_param", [True, False])
+# execution mode
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.vivado
+@pytest.mark.slow
+def test_convert_to_hls_channelwise_layer(
+    pdt, idt, onnx_op_name, scalar_param, exec_mode
+):
+    ifm_ch = 16
+    ifm_dim = 5
+    ishape = (1, ifm_ch, ifm_dim, ifm_dim)
+    if scalar_param:
+        pshape = (1,)
+    else:
+        pshape = (1, ifm_ch, 1, 1)
+
+    np.random.seed(0)
+    model = make_single_maxpool_modelwrapper(onnx_op_name, ishape, idt, pdt, pshape)
+
+    # Since there are no DataTypes with a bit width that is not a power of two,
+    # there are cases where the input won't use its full range.
+    if idt == DataType.INT32:
+        x = gen_finn_dt_tensor(DataType.INT16, (1, ifm_ch, ifm_dim, ifm_dim))
+    elif idt == DataType.UINT32:
+        x = gen_finn_dt_tensor(DataType.UINT16, (1, ifm_ch, ifm_dim, ifm_dim))
+    else:
+        x = gen_finn_dt_tensor(idt, (1, ifm_ch, ifm_dim, ifm_dim))
+
+    input_dict = prepare_inputs(x)
+    y_expected = oxe.execute_onnx(model, input_dict)["outp"]
+
+    new_model = model.transform(to_hls.InferChannelwiseLinearLayer())
+    new_model = new_model.transform(GiveUniqueNodeNames())
+
+    if exec_mode == "cppsim":
+        new_model = new_model.transform(PrepareCppSim())
+        new_model = new_model.transform(CompileCppSim())
+        new_model = new_model.transform(SetExecMode("cppsim"))
+    elif exec_mode == "rtlsim":
+        new_model = new_model.transform(SetExecMode("rtlsim"))
+        new_model = new_model.transform(GiveUniqueNodeNames())
+        new_model = new_model.transform(PrepareIP("xc7z020clg400-1", 5))
+        new_model = new_model.transform(HLSSynthIP())
+        new_model = new_model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    ctx_produced = oxe.execute_onnx(
+        new_model, input_dict, return_full_exec_context=True
+    )
+    y_produced = ctx_produced["outp"]
+
+    assert (y_produced == y_expected).all()
+    assert new_model.graph.node[1].op_type == "ChannelwiseOp_Batch"
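+
+
+# A possible way to run only this test locally (assuming the "vivado" and
+# "slow" markers are registered in the repo's pytest configuration):
+#   pytest -m vivado -k test_convert_to_hls_channelwise_layer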
diff --git a/tests/fpgadataflow/test_convert_to_hls_conv_layer.py b/tests/fpgadataflow/test_convert_to_hls_conv_layer.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4817d70ce3080738e7b7321bfc686b73ad55fe1
--- /dev/null
+++ b/tests/fpgadataflow/test_convert_to_hls_conv_layer.py
@@ -0,0 +1,151 @@
+from onnx import TensorProto, helper
+import numpy as np
+import pytest
+
+from finn.core.datatype import DataType
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.lower_convs_to_matmul import LowerConvsToMatMul
+
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+import finn.core.onnx_exec as oxe
+from finn.core.modelwrapper import ModelWrapper
+from finn.util.basic import gen_finn_dt_tensor
+import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
+
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.custom_op.im2col import compute_conv_output_dim
+from finn.custom_op.registry import getCustomOp
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
+# conv_config: (kernel_size, stride, pad)
+
+
+@pytest.mark.parametrize(
+    "conv_config", [(1, 2, 0), (1, 3, 0), (3, 2, 1), (3, 1, 0), (3, 1, 1), (5, 2, 1)]
+)
+@pytest.mark.parametrize("depthwise", [False, True])
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.slow
+@pytest.mark.vivado
+def test_convert_to_hls_conv_layer(conv_config, depthwise, exec_mode):
+    kernel_size, stride, pad = conv_config
+    np.random.seed(0)
+    idt = DataType.UINT4
+
+    in_feature_dim = 7
+    in_chn = 16
+
+    if depthwise is True:
+        group = out_chn = in_chn
+        conv_param_shape = [out_chn, 1, kernel_size, kernel_size]
+    else:
+        group = 1
+        out_chn = 20
+        conv_param_shape = [out_chn, in_chn, kernel_size, kernel_size]
+
+    out_feature_dim = compute_conv_output_dim(in_feature_dim, kernel_size, stride, pad)
+
+    input_shape = [1, in_chn, in_feature_dim, in_feature_dim]
+    output_shape = [1, out_chn, out_feature_dim, out_feature_dim]
+
+    conv_weight_dt = DataType.UINT4
+
+    conv_config = {}
+    conv_config["dilations"] = [1, 1]
+    conv_config["group"] = group
+    conv_config["kernel_shape"] = [kernel_size, kernel_size]
+    conv_config["pads"] = [pad, pad, pad, pad]
+    conv_config["strides"] = [stride, stride]
+
+    top_in = helper.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = helper.make_tensor_value_info("top_out", TensorProto.FLOAT, output_shape)
+    value_info = [
+        helper.make_tensor_value_info("p1", TensorProto.FLOAT, conv_param_shape)
+    ]
+
+    modelproto = helper.make_model(
+        helper.make_graph(
+            name="conv_test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                helper.make_node("Conv", ["top_in", "p1"], ["top_out"], **conv_config)
+            ],
+        )
+    )
+
+    model = ModelWrapper(modelproto)
+    model.set_tensor_datatype("top_in", idt)
+    model.set_tensor_datatype("top_out", idt)
+    model.set_tensor_datatype("p1", conv_weight_dt)
+    model.set_initializer("p1", gen_finn_dt_tensor(conv_weight_dt, conv_param_shape))
+
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+
+    new_model = model.transform(LowerConvsToMatMul())
+    new_model = new_model.transform(to_hls.InferConvInpGen())
+    if depthwise is True:
+        new_model = new_model.transform(to_hls.InferVVAU())
+    else:
+        new_model = new_model.transform(to_hls.InferQuantizedStreamingFCLayer())
+        fc_node = new_model.get_nodes_by_op_type("StreamingFCLayer_Batch")[0]
+        fc_inst = getCustomOp(fc_node)
+        mw = fc_inst.get_nodeattr("MW")
+        mh = fc_inst.get_nodeattr("MH")
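+        # pick the smallest folding factors (>1) that evenly divide the matrix
+        # dimensions, i.e. minimal PE/SIMD parallelism for the generated FC layer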
+        pe_cands = list(filter(lambda x: mh % x == 0, range(2, mh + 1)))
+        simd_cands = list(filter(lambda x: mw % x == 0, range(2, mw + 1)))
+        fc_inst.set_nodeattr("PE", pe_cands[0])
+        fc_inst.set_nodeattr("SIMD", simd_cands[0])
+
+    new_model = new_model.transform(GiveUniqueNodeNames())
+    new_model = new_model.transform(InferShapes())
+    new_model = new_model.transform(InferDataTypes())
+
+    if exec_mode == "cppsim":
+        new_model = new_model.transform(PrepareCppSim())
+        new_model = new_model.transform(CompileCppSim())
+        new_model = new_model.transform(SetExecMode("cppsim"))
+    elif exec_mode == "rtlsim":
+        new_model = new_model.transform(SetExecMode("rtlsim"))
+        new_model = new_model.transform(GiveUniqueNodeNames())
+        new_model = new_model.transform(PrepareIP("xc7z020clg400-1", 5))
+        new_model = new_model.transform(HLSSynthIP())
+        new_model = new_model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    x = gen_finn_dt_tensor(idt, input_shape)
+    inp_dict = {model.graph.input[0].name: x}
+    assert oxe.compare_execution(model, new_model, inp_dict)
+    if kernel_size == 1 and stride > 1 and pad == 0:
+        assert new_model.graph.node[1].op_type == "DownSampler"
+        if exec_mode == "rtlsim":
+            node = new_model.get_nodes_by_op_type("DownSampler")[0]
+            inst = getCustomOp(node)
+            cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+            exp_cycles_dict = new_model.analysis(exp_cycles_per_layer)
+            exp_cycles = exp_cycles_dict[node.name]
+            assert np.isclose(exp_cycles, cycles_rtlsim, atol=11)
+            assert exp_cycles != 0
+
+    if pad == 1:
+        padding_node = new_model.get_nodes_by_op_type("FMPadding_Batch")[0]
+        padding_inst = getCustomOp(padding_node)
+        assert padding_inst.get_nodeattr("SIMD") == in_chn
+
+    if depthwise is True and exec_mode == "rtlsim":
+        node = new_model.get_nodes_by_op_type("Vector_Vector_Activate_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = new_model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=11)
+        assert exp_cycles != 0
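+
+
+# The cycle checks above compare the analytical estimate from
+# exp_cycles_per_layer against the cycle count measured during rtlsim,
+# allowing a small absolute tolerance between the two.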
diff --git a/tests/fpgadataflow/test_convert_to_hls_layers_cnv.py b/tests/fpgadataflow/test_convert_to_hls_layers_cnv.py
index 220f8a7966a146f954a7fcb3f32058e231b83e23..e8b50efef0723c1394c2bdd438a87e090071507d 100644
--- a/tests/fpgadataflow/test_convert_to_hls_layers_cnv.py
+++ b/tests/fpgadataflow/test_convert_to_hls_layers_cnv.py
@@ -31,7 +31,7 @@ import pkg_resources as pk
 
 import brevitas.onnx as bo
 import numpy as np
-
+import pytest
 import finn.core.onnx_exec as oxe
 import finn.transformation.streamline.absorb as absorb
 from finn.transformation.streamline.reorder import MakeMaxPoolNHWC
@@ -39,9 +39,9 @@ from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.fold_constants import FoldConstants
 from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
 from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_data_layouts import InferDataLayouts
 from finn.transformation.streamline import Streamline
 from finn.util.test import get_test_model_trained
-from finn.transformation.double_to_single_float import DoubleToSingleFloat
 from finn.transformation.lower_convs_to_matmul import LowerConvsToMatMul
 from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount
 import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
@@ -50,14 +50,16 @@ from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
 from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
 from finn.custom_op.registry import getCustomOp
 
-export_onnx_path_cnv = "test_output_cnv.onnx"
+export_onnx_path_cnv = "test_convert_to_hls_layers_cnv.onnx"
 
 
-def test_convert_to_hls_layers_cnv_w1a1():
+@pytest.mark.vivado
+# Standalone or fused thresholding-based activation
+@pytest.mark.parametrize("fused_activation", [True, False])
+def test_convert_to_hls_layers_cnv_w1a1(fused_activation):
     cnv = get_test_model_trained("CNV", 1, 1)
     bo.export_finn_onnx(cnv, (1, 3, 32, 32), export_onnx_path_cnv)
     model = ModelWrapper(export_onnx_path_cnv)
-    model = model.transform(DoubleToSingleFloat())
     model = model.transform(InferShapes())
     model = model.transform(FoldConstants())
     model = model.transform(GiveUniqueNodeNames())
@@ -68,6 +70,7 @@ def test_convert_to_hls_layers_cnv_w1a1():
     model = model.transform(absorb.AbsorbTransposeIntoMultiThreshold())
     model = model.transform(ConvertBipolarMatMulToXnorPopcount())
     model = model.transform(Streamline())
+    model = model.transform(InferDataLayouts())
     # model.save("golden.onnx")
     # load one of the test vectors
     fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz")
@@ -79,6 +82,10 @@ def test_convert_to_hls_layers_cnv_w1a1():
     expected_ctx = oxe.execute_onnx(model, input_dict, True)
     expected = expected_ctx[model.graph.output[0].name]
 
+    # if we infer thresholding first, all MultiThresholds get converted to HLS
+    # subsequently, the FC inference will generate passthrough MVAUs
+    if not fused_activation:
+        model = model.transform(to_hls.InferThresholdingLayer())
     model = model.transform(to_hls.InferBinaryStreamingFCLayer())
     model = model.transform(to_hls.InferQuantizedStreamingFCLayer())
     for node in model.graph.node:
@@ -101,7 +108,12 @@ def test_convert_to_hls_layers_cnv_w1a1():
     model = model.transform(to_hls.InferStreamingMaxPool())
     # check topology status
     finn_nodes = model.get_finn_nodes()
-    assert len(finn_nodes) == 18
+    if fused_activation:
+        assert len(finn_nodes) == 18
+    else:
+        assert len(finn_nodes) == 26
+        thr_nodes = model.get_nodes_by_op_type("Thresholding_Batch")
+        assert len(thr_nodes) == 8
     non_finn_nodes = model.get_non_finn_nodes()
     assert len(non_finn_nodes) == 4
     exp_non_finn_nodes = ["Transpose", "Reshape", "Mul", "Add"]
diff --git a/tests/fpgadataflow/test_convert_to_hls_layers_fc.py b/tests/fpgadataflow/test_convert_to_hls_layers_fc.py
index b7dea03797bc5de5e7517d0d8b816c438027008b..bd600c6c57d00d5fc03152f75b9f2f8c6beeeb2c 100644
--- a/tests/fpgadataflow/test_convert_to_hls_layers_fc.py
+++ b/tests/fpgadataflow/test_convert_to_hls_layers_fc.py
@@ -34,7 +34,7 @@ import numpy as np
 import onnx
 import onnx.numpy_helper as nph
 import torch
-
+import pytest
 import finn.core.onnx_exec as oxe
 import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
 import finn.transformation.streamline.absorb as absorb
@@ -52,10 +52,10 @@ from finn.transformation.streamline.round_thresholds import RoundAndClipThreshol
 from finn.util.test import get_test_model_trained
 
 
-export_onnx_path = "test_output_tfc.onnx"
-export_onnx_path_cnv = "test_output_cnv.onnx"
+export_onnx_path = "test_convert_to_hls_layers_fc.onnx"
 
 
+@pytest.mark.vivado
 def test_convert_to_hls_layers_tfc_w1a1():
     tfc = get_test_model_trained("TFC", 1, 1)
     bo.export_finn_onnx(tfc, (1, 1, 28, 28), export_onnx_path)
@@ -89,7 +89,6 @@ def test_convert_to_hls_layers_tfc_w1a1():
     assert fc3.op_type == "StreamingFCLayer_Batch"
     assert model.get_tensor_shape(fc3.input[0]) == [1, 64]
     assert model.get_tensor_shape(fc3.input[1]) == [64, 10]
-    os.remove(export_onnx_path)
 
     fc0w = getCustomOp(fc0)
     fc0w.set_nodeattr("SIMD", 784)
@@ -123,8 +122,10 @@ def test_convert_to_hls_layers_tfc_w1a1():
     # do forward pass in PyTorch/Brevitas
     expected = tfc.forward(input_tensor).detach().numpy()
     assert np.isclose(produced, expected, atol=1e-3).all()
+    os.remove(export_onnx_path)
 
 
+@pytest.mark.vivado
 def test_convert_to_hls_layers_tfc_w1a2():
     tfc = get_test_model_trained("TFC", 1, 2)
     bo.export_finn_onnx(tfc, (1, 1, 28, 28), export_onnx_path)
diff --git a/tests/fpgadataflow/test_convert_to_hls_layers_synthetic.py b/tests/fpgadataflow/test_convert_to_hls_layers_synthetic.py
new file mode 100644
index 0000000000000000000000000000000000000000..86875d2ac7f37e697c5de198e15aa3045a9e3d42
--- /dev/null
+++ b/tests/fpgadataflow/test_convert_to_hls_layers_synthetic.py
@@ -0,0 +1,230 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import numpy as np
+
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fold_constants import FoldConstants
+from finn.transformation.general import (
+    GiveReadableTensorNames,
+    GiveUniqueNodeNames,
+    SortGraph,
+)
+from finn.transformation.streamline.reorder import MoveScalarLinearPastInvariants
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.util.basic import gen_finn_dt_tensor
+from finn.util.test import soft_verify_topk
+from finn.transformation.insert_topk import InsertTopK
+import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.streamline.absorb import (
+    AbsorbScalarMulAddIntoTopK,
+    AbsorbConsecutiveTransposes,
+)
+from finn.transformation.streamline.collapse_repeated import (
+    CollapseRepeatedMul,
+    CollapseRepeatedAdd,
+)
+from finn.transformation.streamline.reorder import MoveAddPastMul
+
+import pytest
+
+export_onnx_path = "test_output_synthetic.onnx"
+
+# construct a synthetic graph to test:
+# topk insertion, topk conversion to hls, add conversion to hls
+# graph should just be a sum
+
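+# the graph built by make_model below is:
+#   inp -> Add(0) -> [Add(7) -> Mul(2), Add(8) -> Mul(2)] -> Add
+#       -> GlobalAveragePool -> Reshape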
+
+def make_model(ch, ifmdim):
+    shape = [1, ch, ifmdim, ifmdim]
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, shape)
+    inp1_add0_ct = helper.make_tensor_value_info("inp1_add0_ct", TensorProto.FLOAT, [1])
+    inp1_add = helper.make_tensor_value_info("inp1_add", TensorProto.FLOAT, shape)
+    inp1_add_ct = helper.make_tensor_value_info("inp1_add_ct", TensorProto.FLOAT, [1])
+    inp2_add = helper.make_tensor_value_info("inp2_add", TensorProto.FLOAT, shape)
+    inp2_add_ct = helper.make_tensor_value_info("inp2_add_ct", TensorProto.FLOAT, [1])
+    inp1_mul = helper.make_tensor_value_info("inp1_mul", TensorProto.FLOAT, shape)
+    inp1_mul_ct = helper.make_tensor_value_info("inp1_mul_ct", TensorProto.FLOAT, [1])
+    inp2_mul = helper.make_tensor_value_info("inp2_mul", TensorProto.FLOAT, shape)
+    inp2_mul_ct = helper.make_tensor_value_info("inp2_mul_ct", TensorProto.FLOAT, [1])
+    eltwise_add = helper.make_tensor_value_info("eltwise_add", TensorProto.FLOAT, shape)
+    pool = helper.make_tensor_value_info("pool", TensorProto.FLOAT, [1, ch, 1, 1])
+    reshape_ct = helper.make_tensor_value_info("reshape_ct", TensorProto.INT64, [2])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, ch])
+
+    add0_node = helper.make_node("Add", [inp.name, inp1_add0_ct.name], ["out_add0"])
+    add1_node = helper.make_node("Add", ["out_add0", inp1_add_ct.name], [inp1_add.name])
+    add2_node = helper.make_node("Add", ["out_add0", inp2_add_ct.name], [inp2_add.name])
+    mul1_node = helper.make_node(
+        "Mul", [inp1_add.name, inp1_mul_ct.name], [inp1_mul.name]
+    )
+    mul2_node = helper.make_node(
+        "Mul", [inp2_add.name, inp2_mul_ct.name], [inp2_mul.name]
+    )
+    eltwise_add_node = helper.make_node(
+        "Add", [inp1_mul.name, inp2_mul.name], [eltwise_add.name]
+    )
+    globalavgpool_node = helper.make_node(
+        "GlobalAveragePool", [eltwise_add.name], [pool.name]
+    )
+    reshape_node = helper.make_node(
+        "Reshape", [pool.name, reshape_ct.name], [outp.name]
+    )
+
+    graph = helper.make_graph(
+        nodes=[
+            add0_node,
+            add1_node,
+            add2_node,
+            mul1_node,
+            mul2_node,
+            eltwise_add_node,
+            globalavgpool_node,
+            reshape_node,
+        ],
+        name="graph",
+        inputs=[inp],
+        outputs=[outp],
+    )
+
+    model = helper.make_model(graph, producer_name="add-model")
+    model = ModelWrapper(model)
+
+    # set initializers for scalar add/mul nodes
+    model.set_initializer(add0_node.input[1], np.array([0.0]))
+    model.set_initializer(add1_node.input[1], np.array([7.0]))
+    model.set_initializer(add2_node.input[1], np.array([8.0]))
+    model.set_initializer(mul1_node.input[1], np.array([2.0]))
+    model.set_initializer(mul2_node.input[1], np.array([2.0]))
+    model.set_initializer(reshape_node.input[1], np.array([1, -1]))
+
+    return model
+
+
+# data types
+@pytest.mark.parametrize("idt", [DataType.UINT2])
+# channels
+@pytest.mark.parametrize("ch", [16])
+# ifmdim
+@pytest.mark.parametrize("ifmdim", [5])
+@pytest.mark.vivado
+@pytest.mark.slow
+def test_convert_to_hls_layers_synthetic(ch, ifmdim, idt):
+    model = make_model(ch, ifmdim)
+    model.save(export_onnx_path)
+    model = ModelWrapper(export_onnx_path)
+    model = model.transform(InferShapes())
+    model = model.transform(FoldConstants())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+    model = model.transform(InferDataLayouts())
+    # model.save("golden.onnx")
+    # generate test vectors of correct shape
+    if ifmdim == -1:
+        input_tensor_shape = (1, ch)
+    else:
+        input_tensor_shape = (1, ch, ifmdim, ifmdim)
+
+    x = gen_finn_dt_tensor(idt, input_tensor_shape)
+
+    # generate expected value from streamlined net
+    input_dict = {model.graph.input[0].name: x}
+
+    output_dict = oxe.execute_onnx(model, input_dict, True)
+    produced_sum = output_dict[model.graph.output[0].name]
+    chw_mul = model.get_initializer(model.graph.node[-1].input[1])
+    chw_mul = 1
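+    # expected value derived from the graph structure: the two branches compute
+    # 2*(x+7) and 2*(x+8), which sum to 2*(2*x+15); GlobalAveragePool then
+    # divides by ifmdim*ifmdim (chw_mul read above is effectively unused, as it
+    # is overwritten with 1)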
+    expected_sum = chw_mul * np.sum(2 * (2 * x + 15.0), axis=(2, 3)) / (ifmdim * ifmdim)
+    assert (produced_sum.flatten() == expected_sum.flatten()).all()
+
+    model = model.transform(InferDataLayouts())
+
+    # convert to hls
+    model.set_tensor_datatype(model.graph.input[0].name, idt)
+    # extra streamlining
+    model = model.transform(MoveScalarLinearPastInvariants())
+    model = model.transform(MoveAddPastMul())
+    model = model.transform(CollapseRepeatedMul())
+    model = model.transform(CollapseRepeatedAdd())
+    # insert top-k node, which should absorb linear ops before it
+
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataLayouts())
+    model = model.transform(InferDataTypes())
+
+    model = model.transform(to_hls.InferChannelwiseLinearLayer())
+    model = model.transform(to_hls.InferAddStreamsLayer())
+    model = model.transform(to_hls.InferGlobalAccPoolLayer())
+    model = model.transform(MoveScalarLinearPastInvariants())
+    model = model.transform(InsertTopK())
+    model = model.transform(AbsorbScalarMulAddIntoTopK())
+    model = model.transform(InferDataTypes())
+    model = model.transform(to_hls.InferLabelSelectLayer())
+    model = model.transform(AbsorbConsecutiveTransposes())
+    model = model.transform(InferDataTypes())
+    model = model.transform(to_hls.InferLabelSelectLayer())
+    model = model.transform(to_hls.InferDuplicateStreamsLayer())
+
+    model = model.transform(SortGraph())
+
+    # model.save("golden_hls.onnx")
+    # check topology status
+
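+    # expected 9 FINN nodes in total: 5x ChannelwiseOp + AddStreams +
+    # GlobalAccPool + LabelSelect + DuplicateStreams (counted per op type below)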
+    finn_nodes = model.get_finn_nodes()
+    assert len(finn_nodes) == 9
+    add_nodes = model.get_nodes_by_op_type("AddStreams_Batch")
+    assert len(add_nodes) == 1
+    pool_nodes = model.get_nodes_by_op_type("GlobalAccPool_Batch")
+    assert len(pool_nodes) == 1
+    label_nodes = model.get_nodes_by_op_type("LabelSelect_Batch")
+    assert len(label_nodes) == 1
+    channelwise_nodes = model.get_nodes_by_op_type("ChannelwiseOp_Batch")
+    assert len(channelwise_nodes) == 5
+    dup_nodes = model.get_nodes_by_op_type("DuplicateStreams_Batch")
+    assert len(dup_nodes) == 1
+
+    model = model.transform(PrepareCppSim())
+    model = model.transform(CompileCppSim())
+    model = model.transform(SetExecMode("cppsim"))
+
+    output_dict = oxe.execute_onnx(model, input_dict, True)
+    produced_topk_hls = output_dict[model.graph.output[0].name]
+    topk_input = output_dict[model.graph.node[-1].input[0]]
+    assert soft_verify_topk(topk_input, produced_topk_hls, 5)
+
+    os.remove(export_onnx_path)
diff --git a/tests/fpgadataflow/test_convert_to_hls_pool_batch.py b/tests/fpgadataflow/test_convert_to_hls_pool_batch.py
new file mode 100644
index 0000000000000000000000000000000000000000..86409feffd120b1baeeee471415e93f29d9e655a
--- /dev/null
+++ b/tests/fpgadataflow/test_convert_to_hls_pool_batch.py
@@ -0,0 +1,221 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+
+from onnx import TensorProto, helper
+import numpy as np
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.custom_op.registry import getCustomOp
+from finn.util.basic import gen_finn_dt_tensor
+from finn.transformation.infer_shapes import InferShapes
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
+
+def make_single_maxpool_modelwrapper(k, stride, pad, ifm_ch, ifm_dim, ofm_dim, idt):
+    odt = idt
+    inp = helper.make_tensor_value_info(
+        "inp", TensorProto.FLOAT, [1, ifm_ch, ifm_dim, ifm_dim]
+    )
+    outp = helper.make_tensor_value_info(
+        "outp", TensorProto.FLOAT, [1, ifm_ch, ofm_dim, ofm_dim]
+    )
+
+    mp_node = helper.make_node(
+        "MaxPool",
+        ["inp"],
+        ["outp"],
+        kernel_shape=[k, k],
+        pads=[pad, pad, pad, pad],
+        strides=[stride, stride],
+    )
+    graph = helper.make_graph(
+        nodes=[mp_node], name="mp_graph", inputs=[inp], outputs=[outp]
+    )
+
+    model = helper.make_model(graph, producer_name="mp-model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", idt)
+    model.set_tensor_datatype("outp", odt)
+    model = model.transform(InferShapes())
+
+    return model
+
+
+def make_single_quantavpool_modelwrapper(k, stride, ifm_ch, ifm_dim, ofm_dim, idt, odt):
+    inp = helper.make_tensor_value_info(
+        "inp", TensorProto.FLOAT, [1, ifm_ch, ifm_dim, ifm_dim]
+    )
+    outp = helper.make_tensor_value_info(
+        "outp", TensorProto.FLOAT, [1, ifm_ch, ofm_dim, ofm_dim]
+    )
+
+    mp_node = helper.make_node(
+        "QuantAvgPool2d",
+        ["inp"],
+        ["outp"],
+        domain="finn",
+        stride=stride,
+        kernel=k,
+        ibits=idt.bitwidth(),
+        obits=odt.bitwidth(),
+        signed=1 if idt.signed() else 0,
+        data_layout="NCHW",
+    )
+    graph = helper.make_graph(
+        nodes=[mp_node], name="mp_graph", inputs=[inp], outputs=[outp]
+    )
+
+    model = helper.make_model(graph, producer_name="mp-model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", idt)
+    model.set_tensor_datatype("outp", odt)
+    model = model.transform(InferShapes())
+
+    return model
+
+
+def prepare_inputs(input_tensor):
+    return {"inp": input_tensor}
+
+
+# input datatype
+@pytest.mark.parametrize("idt", [DataType.UINT4, DataType.INT4, DataType.INT8])
+# output datatype
+@pytest.mark.parametrize("odt", [DataType.UINT4, DataType.INT4])
+# pool configuration:                   ( k,stride, pad, ifm_dim )
+@pytest.mark.parametrize("pool_config", [(7, 7, 0, 7), (3, 2, 1, 5)])
+# input channels
+@pytest.mark.parametrize("ifm_ch", [1, 4])
+# number of output channels computed in parallel (PE)
+@pytest.mark.parametrize("pe", [1, 2, 4])
+# pool type
+@pytest.mark.parametrize("op_type", ["QuantAvgPool2d", "MaxPool"])
+# execution mode
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.slow
+@pytest.mark.vivado
+def test_convert_to_hls_pool_batch(
+    idt, odt, pool_config, ifm_ch, pe, op_type, exec_mode
+):
+    k, stride, pad, ifm_dim = pool_config
+
+    if ifm_ch % pe != 0:
+        pytest.skip("ifm_ch%pe != 0. Skipping")
+
+    if pad != 0 and idt.signed():
+        pytest.skip("No support for pal_val != 0. Skipping")
+
+    np.random.seed(0)
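+    # standard pooling output size: floor((ifm_dim + 2*pad - k) / stride) + 1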
+    ofm_dim = int(((ifm_dim + 2 * pad - k) / stride) + 1)
+
+    x = gen_finn_dt_tensor(idt, (1, ifm_ch, ifm_dim, ifm_dim))
+    # prepare input data
+    input_dict = prepare_inputs(x)
+    if op_type == "MaxPool":
+        # if idt.signed():
+        #     pytest.skip("""No support for signed input (see accu initialization
+        #         in Pool_batch HLSLIB function). Skipping""")
+
+        if idt != odt:
+            pytest.skip("Skipping Maxpool with idt != odt")
+
+        model = make_single_maxpool_modelwrapper(
+            k, stride, pad, ifm_ch, ifm_dim, ofm_dim, idt
+        )
+    elif op_type == "QuantAvgPool2d":
+        if pad != 0:
+            pytest.skip("No padding support for QuantAvgPool2d. Skipping")
+
+        if idt.signed() != odt.signed():
+            pytest.skip("Skipping QuantAvgPool2d with idt.signed() != odt.signed()")
+        model = make_single_quantavpool_modelwrapper(
+            k, stride, ifm_ch, ifm_dim, ofm_dim, idt, odt
+        )
+    else:
+        assert False, "{} is not a supported op_type".format(op_type)
+
+    y_expected = oxe.execute_onnx(model, input_dict)["outp"]
+
+    new_model = model.transform(to_hls.InferPool_Batch())
+    new_model = new_model.transform(GiveUniqueNodeNames())
+
+    if ifm_ch != pe:
+        new_model = new_model.transform(to_hls.InferConvInpGen())
+        # Folding
+        for n in new_model.graph.node:
+            if n.op_type == "ConvolutionInputGenerator":
+                inst = getCustomOp(n)
+                inst.set_nodeattr("SIMD", pe)
+            elif n.op_type == "Pool_Batch":
+                inst = getCustomOp(n)
+                inst.set_nodeattr("PE", pe)
+
+    if exec_mode == "cppsim":
+        new_model = new_model.transform(SetExecMode("cppsim"))
+        new_model = new_model.transform(PrepareCppSim())
+        new_model = new_model.transform(CompileCppSim())
+    elif exec_mode == "rtlsim":
+        new_model = new_model.transform(SetExecMode("rtlsim"))
+        new_model = new_model.transform(GiveUniqueNodeNames())
+        new_model = new_model.transform(PrepareIP("xc7z020clg400-1", 5))
+        new_model = new_model.transform(HLSSynthIP())
+        new_model = new_model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    # execute new_model
+    y_produced = oxe.execute_onnx(new_model, input_dict)["outp"]
+    assert (y_produced == y_expected).all()
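+    # topology check: conversion to Pool_Batch only happens for stride <= k;
+    # the converted graph has 4 nodes, or 5 when an extra node is required
+    # (pad != 0 and ifm_ch != pe), and stays at 1 node otherwise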
+    if stride <= k:
+        if pad == 0 or ifm_ch == pe:
+            assert len(new_model.graph.node) == 4
+        else:
+            assert len(new_model.graph.node) == 5
+    else:
+        assert len(new_model.graph.node) == 1
+
+    if exec_mode == "rtlsim":
+        node = new_model.get_nodes_by_op_type("Pool_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = new_model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=10)
diff --git a/tests/fpgadataflow/test_create_dataflow_partition.py b/tests/fpgadataflow/test_create_dataflow_partition.py
index 77e0ddeebf6080e1840d6014978a4c9b4a10b5c1..c4f748051ff038371353574298580f3bf9e05e9f 100644
--- a/tests/fpgadataflow/test_create_dataflow_partition.py
+++ b/tests/fpgadataflow/test_create_dataflow_partition.py
@@ -29,7 +29,6 @@
 import os.path
 from pkgutil import get_data
 
-import pytest
 
 from finn.core.modelwrapper import ModelWrapper
 from finn.custom_op.registry import getCustomOp
@@ -38,11 +37,11 @@ from finn.transformation.fpgadataflow.create_dataflow_partition import (
 )
 from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker
 from finn.util.basic import make_build_dir
+from finn.util.test import load_test_checkpoint_or_skip
 
 build_dir = make_build_dir("test_dataflow_partition_")
 
 
-@pytest.mark.dependency()
 def test_dataflow_partition_create():
     # load the onnx model
     raw_m = get_data(
@@ -57,9 +56,10 @@ def test_dataflow_partition_create():
     model.save(build_dir + "/test_dataflow_partition_create.onnx")
 
 
-@pytest.mark.dependency(depends=["test_dataflow_partition_create"])
 def test_dataflow_partition_tlastmarker():
-    model = ModelWrapper(build_dir + "/test_dataflow_partition_create.onnx")
+    model = load_test_checkpoint_or_skip(
+        build_dir + "/test_dataflow_partition_create.onnx"
+    )
     model_path = getCustomOp(model.graph.node[2]).get_nodeattr("model")
     model = ModelWrapper(model_path)
     model = model.transform(InsertTLastMarker())
diff --git a/tests/fpgadataflow/test_depthwise_convolution.py b/tests/fpgadataflow/test_depthwise_convolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..f269a1ed7247503f561425b97115694503522171
--- /dev/null
+++ b/tests/fpgadataflow/test_depthwise_convolution.py
@@ -0,0 +1,245 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+import onnx.helper as oh
+from onnx import TensorProto
+import numpy as np
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.core.datatype import DataType
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.fpgadataflow.convert_to_hls_layers import (
+    InferConvInpGen,
+    InferVVAU,
+)
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+
+import finn.core.onnx_exec as oxe
+from finn.custom_op.im2col import compute_conv_output_dim
+from finn.util.basic import calculate_signed_dot_prod_range, gen_finn_dt_tensor
+from finn.custom_op.registry import getCustomOp
+
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+
+
+def set_up_reference_model(act, idt, wdt, k, ifm_dim, ifm_ch, stride, padding):
+
+    # set up reference model consisting of Im2Col + MatMul (+ MultiThreshold)
+    ofm_ch = ifm_ch
+    ofm_dim = compute_conv_output_dim(ifm_dim, k, stride, pad=padding)
+
+    if act is None:
+        odt = DataType.INT32
+    else:
+        odt = act
+        out_act = oh.make_tensor_value_info(
+            "out_act", TensorProto.FLOAT, [1, ofm_dim, ofm_dim, ofm_ch]
+        )
+        T = oh.make_tensor_value_info("T", TensorProto.FLOAT, [ofm_ch, 15])
+        tdt = DataType.INT32
+        thresh_node = oh.make_node(
+            "MultiThreshold",
+            domain="finn",
+            inputs=["outp", "T"],
+            outputs=["out_act"],
+            data_layout="NHWC",
+            out_dtype=odt.name,
+            out_scale=1.0,
+            out_bias=0.0,
+        )
+
+    # set up onnx model
+    inp = oh.make_tensor_value_info(
+        "inp", TensorProto.FLOAT, [1, ifm_dim, ifm_dim, ifm_ch]
+    )
+    outp = oh.make_tensor_value_info(
+        "outp", TensorProto.FLOAT, [1, ofm_dim, ofm_dim, ofm_ch]
+    )
+
+    W_sparse = oh.make_tensor_value_info(
+        "W_sparse", TensorProto.FLOAT, [ifm_ch * k * k, ofm_ch]
+    )
+
+    im2col_node = oh.make_node(
+        "Im2Col",
+        domain="finn",
+        inputs=["inp"],
+        outputs=["im2col_out"],
+        kernel_size=k,
+        stride=stride,
+        pad_amount=padding,
+        input_shape="(1, {}, {}, {})".format(ifm_dim, ifm_dim, ifm_ch),
+        depthwise=1,
+    )
+
+    matmul_node = oh.make_node(
+        "MatMul", inputs=["im2col_out", "W_sparse"], outputs=["outp"]
+    )
+
+    if act is None:
+        node_list = [im2col_node, matmul_node]
+        global_out = outp
+        value_info = [W_sparse]
+    else:
+        node_list = [im2col_node, matmul_node, thresh_node]
+        global_out = out_act
+        value_info = [W_sparse, T]
+
+    graph = oh.make_graph(
+        nodes=node_list,
+        name="lowered_dw_cnv_graph",
+        inputs=[inp],
+        outputs=[global_out],
+        value_info=value_info,
+    )
+    model = oh.make_model(graph, producer_name="lowered_dw_cnv-model")
+    model = ModelWrapper(model)
+
+    # initialize model
+    model.set_tensor_datatype("inp", idt)
+    model.set_tensor_datatype(model.graph.output[0].name, odt)
+    model.set_tensor_datatype("W_sparse", wdt)
+
+    w_tensor = gen_finn_dt_tensor(wdt, [ofm_ch, 1, k, k])
+    # create sparse matrix
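+    # (output channel ch only connects to input channel ch: each k*k depthwise
+    # kernel is placed on the channel diagonal of a dense weight matrix)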
+    W_matrix = np.zeros((ofm_ch, ifm_ch, k, k))
+    for ch in range(ifm_ch):
+        W_matrix[ch][ch] = w_tensor[ch][0]
+    W_matrix = W_matrix.astype(np.float32)
+    W_matrix = W_matrix.transpose(0, 2, 3, 1)
+    W_matrix = W_matrix.reshape(ofm_ch, ifm_ch * k * k)
+
+    model.set_initializer("W_sparse", W_matrix.T)
+    sparsity = {"dw": {"kernel_shape": k}}
+    model.set_tensor_sparsity("W_sparse", sparsity)
+
+    if act is not None:
+        (min, max) = calculate_signed_dot_prod_range(idt, wdt, ifm_ch * k * k)
+        n_steps = odt.get_num_possible_values() - 1
+        T_values = np.random.randint(min, max - 1, (ofm_ch, n_steps)).astype(np.float32)
+        # provide non-decreasing thresholds
+        T_values = np.sort(T_values, axis=1)
+        model.set_initializer("T", T_values)
+        model.set_tensor_datatype("T", tdt)
+
+    model = model.transform(InferShapes())
+
+    return model
+
+
+# PE
+@pytest.mark.parametrize("pe", [1, 2, 4])
+# Output activation
+@pytest.mark.parametrize("act", [None, DataType.UINT4])
+# kernel size
+@pytest.mark.parametrize("k", [2, 4])
+# stride
+@pytest.mark.parametrize("stride", [1, 2])
+# padding
+@pytest.mark.parametrize("padding", [0, 1])
+@pytest.mark.slow
+@pytest.mark.vivado
+def test_depthwise_conv_hls_cppsim(act, pe, k, stride, padding):
+    idt = wdt = DataType.INT4
+    ifm_dim = 6
+    ifm_ch = 4
+
+    # set up reference model consisting of Im2Col + MatMul (+ MultiThreshold)
+    model = set_up_reference_model(act, idt, wdt, k, ifm_dim, ifm_ch, stride, padding)
+
+    input_tensor = gen_finn_dt_tensor(idt, [1, ifm_dim, ifm_dim, ifm_ch])
+    input_dict = {"inp": input_tensor}
+
+    new_model = model.transform(InferConvInpGen())
+    new_model = new_model.transform(InferVVAU())
+
+    # set SIMD in ConvInputGen node and PE in VVAU node
+
+    for n in new_model.graph.node:
+        if n.op_type == "ConvolutionInputGenerator":
+            convinputgen_node = getCustomOp(n)
+            convinputgen_node.set_nodeattr("SIMD", pe)
+        elif n.op_type == "Vector_Vector_Activate_Batch":
+            vvau_node = getCustomOp(n)
+            vvau_node.set_nodeattr("PE", pe)
+    new_model = new_model.transform(SetExecMode("cppsim"))
+    new_model = new_model.transform(PrepareCppSim())
+    new_model = new_model.transform(CompileCppSim())
+
+    assert oxe.compare_execution(model, new_model, input_dict)
+
+
+# PE
+@pytest.mark.parametrize("pe", [1, 2, 4])
+# Output activation
+@pytest.mark.parametrize("act", [None, DataType.UINT4])
+# kernel size
+@pytest.mark.parametrize("k", [2, 4])
+# stride
+@pytest.mark.parametrize("stride", [1, 2])
+# padding
+@pytest.mark.parametrize("padding", [0, 1])
+@pytest.mark.slow
+@pytest.mark.vivado
+def test_depthwise_conv_hls_rtlsim(act, pe, k, stride, padding):
+    idt = wdt = DataType.INT4
+    ifm_dim = 6
+    ifm_ch = 4
+
+    # set up reference model consisting of Im2Col + MatMul (+ MultiThreshold)
+    model = set_up_reference_model(act, idt, wdt, k, ifm_dim, ifm_ch, stride, padding)
+
+    input_tensor = gen_finn_dt_tensor(idt, [1, ifm_dim, ifm_dim, ifm_ch])
+    input_dict = {"inp": input_tensor}
+
+    new_model = model.transform(InferConvInpGen())
+    new_model = new_model.transform(InferVVAU())
+
+    # set SIMD in ConvInputGen node and PE in VVAU node
+
+    for n in new_model.graph.node:
+        if n.op_type == "ConvolutionInputGenerator":
+            convinputgen_node = getCustomOp(n)
+            convinputgen_node.set_nodeattr("SIMD", pe)
+        elif n.op_type == "Vector_Vector_Activate_Batch":
+            vvau_node = getCustomOp(n)
+            vvau_node.set_nodeattr("PE", pe)
+
+    new_model = new_model.transform(SetExecMode("rtlsim"))
+    new_model = new_model.transform(GiveUniqueNodeNames())
+    new_model = new_model.transform(PrepareIP("xc7z020clg400-1", 5))
+    new_model = new_model.transform(HLSSynthIP())
+    new_model = new_model.transform(PrepareRTLSim())
+
+    assert oxe.compare_execution(model, new_model, input_dict)
diff --git a/tests/fpgadataflow/test_fpgadataflow_addstreams.py b/tests/fpgadataflow/test_fpgadataflow_addstreams.py
new file mode 100644
index 0000000000000000000000000000000000000000..e4191c75d6249d22b36e41fed50c5f7896f13c22
--- /dev/null
+++ b/tests/fpgadataflow/test_fpgadataflow_addstreams.py
@@ -0,0 +1,135 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+import numpy as np
+
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.util.basic import gen_finn_dt_tensor
+from finn.custom_op.registry import getCustomOp
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
+
+def make_addstreams_modelwrapper(ch, pe, idt):
+    inp1 = helper.make_tensor_value_info("inp1", TensorProto.FLOAT, [1, ch])
+    inp2 = helper.make_tensor_value_info("inp2", TensorProto.FLOAT, [1, ch])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, ch])
+
+    addstreams_node = helper.make_node(
+        "AddStreams_Batch",
+        ["inp1", "inp2"],
+        ["outp"],
+        domain="finn",
+        backend="fpgadataflow",
+        NumChannels=ch,
+        PE=pe,
+        inputDataType=idt.name,
+    )
+    graph = helper.make_graph(
+        nodes=[addstreams_node], name="graph", inputs=[inp1, inp2], outputs=[outp],
+    )
+
+    model = helper.make_model(graph, producer_name="addstreams-model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp1", idt)
+    model.set_tensor_datatype("inp2", idt)
+
+    return model
+
+
+def prepare_inputs(input1, input2):
+    return {"inp1": input1, "inp2": input2}
+
+
+# data types
+@pytest.mark.parametrize("idt", [DataType.UINT4, DataType.UINT8])
+# channels
+@pytest.mark.parametrize("ch", [1, 64])
+# folding
+@pytest.mark.parametrize("fold", [-1, 2, 1])
+# execution mode
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.vivado
+def test_fpgadataflow_addstreams(idt, ch, fold, exec_mode):
+    if fold == -1:
+        pe = 1
+    else:
+        pe = max(1, ch // fold)
+    assert ch % pe == 0
+
+    # generate input data
+    x1 = gen_finn_dt_tensor(idt, (1, ch))
+    x2 = gen_finn_dt_tensor(idt, (1, ch))
+
+    model = make_addstreams_modelwrapper(ch, pe, idt)
+
+    if exec_mode == "cppsim":
+        model = model.transform(PrepareCppSim())
+        model = model.transform(CompileCppSim())
+        model = model.transform(SetExecMode("cppsim"))
+    elif exec_mode == "rtlsim":
+        model = model.transform(SetExecMode("rtlsim"))
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(PrepareIP("xc7z020clg400-1", 5))
+        model = model.transform(HLSSynthIP())
+        model = model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    # prepare input data
+    input_dict = prepare_inputs(x1, x2)
+
+    oshape = model.get_tensor_shape("outp")
+    y = x1 + x2
+    y_expected = y.reshape(oshape)
+    # execute model
+    y_produced = oxe.execute_onnx(model, input_dict)["outp"]
+    y_produced = y_produced.reshape(y_expected.shape)
+
+    assert (y_produced == y_expected).all(), exec_mode + " failed"
+
+    if exec_mode == "rtlsim":
+        node = model.get_nodes_by_op_type("AddStreams_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=10)
+        assert exp_cycles != 0
diff --git a/tests/fpgadataflow/test_fpgadataflow_channelwise_ops.py b/tests/fpgadataflow/test_fpgadataflow_channelwise_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e0ce7d542f605cc6af5df13b45d670cfcafa5a9
--- /dev/null
+++ b/tests/fpgadataflow/test_fpgadataflow_channelwise_ops.py
@@ -0,0 +1,162 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+
+import numpy as np
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.analysis.fpgadataflow.hls_synth_res_estimation import hls_synth_res_estimation
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.util.basic import gen_finn_dt_tensor
+from finn.custom_op.registry import getCustomOp
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
+
+def make_modelwrapper(C, pe, idt, odt, pdt, func, vecs):
+    NumChannels = C.shape[0]
+
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, vecs + [NumChannels])
+    outp = helper.make_tensor_value_info(
+        "outp", TensorProto.FLOAT, vecs + [NumChannels]
+    )
+
+    node_inp_list = ["inp", "const"]
+
+    node = helper.make_node(
+        "ChannelwiseOp_Batch",
+        node_inp_list,
+        ["outp"],
+        domain="finn",
+        backend="fpgadataflow",
+        NumChannels=NumChannels,
+        Func=func,
+        PE=pe,
+        inputDataType=idt.name,
+        outputDataType=odt.name,
+        paramDataType=pdt.name,
+        numInputVectors=vecs,
+    )
+    graph = helper.make_graph(nodes=[node], name="graph", inputs=[inp], outputs=[outp])
+
+    model = helper.make_model(graph, producer_name="model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", idt)
+    model.set_tensor_datatype("outp", odt)
+
+    model.set_tensor_datatype("const", idt)
+    model.set_initializer("const", C)
+    return model
+
+
+# activation: None or DataType
+@pytest.mark.parametrize("act", [DataType.INT8])
+# input datatype
+@pytest.mark.parametrize("idt", [DataType.INT4])
+# param datatype
+@pytest.mark.parametrize("pdt", [DataType.INT4])
+# folding, -1 is maximum possible
+@pytest.mark.parametrize("nf", [-1, 2])
+# number of input features
+@pytest.mark.parametrize("ich", [16])
+# vecs
+@pytest.mark.parametrize("vecs", [[1], [1, 7, 7]])
+# function
+@pytest.mark.parametrize("func", ["add", "mul"])
+# execution mode
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.vivado
+@pytest.mark.slow
+def test_fpgadataflow_channelwise_ops(idt, act, pdt, nf, ich, func, vecs, exec_mode):
+    if nf == -1:
+        nf = ich
+    pe = ich // nf
+    assert ich % pe == 0
+
+    # generate input and param data
+    x = gen_finn_dt_tensor(idt, tuple(vecs + [ich]))
+    # C = np.random.randint(idt.min(), idt.max() + 1, ich).astype(np.float32)
+    C = gen_finn_dt_tensor(pdt, (ich,))
+
+    odt = act
+
+    model = make_modelwrapper(C, pe, idt, odt, pdt, func, vecs)
+
+    if exec_mode == "cppsim":
+        model = model.transform(PrepareCppSim())
+        model = model.transform(CompileCppSim())
+        model = model.transform(SetExecMode("cppsim"))
+    elif exec_mode == "rtlsim":
+        model = model.transform(SetExecMode("rtlsim"))
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(PrepareIP("xc7z020clg400-1", 5))
+        model = model.transform(HLSSynthIP())
+        model = model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    # package input data as dictionary
+    input_dict = {"inp": x}
+
+    oshape = model.get_tensor_shape("outp")
+
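+    # reference output: broadcast the per-channel constant over the input shape
+    # and apply the channelwise add/mul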
+    C_reshaped = np.broadcast_to(C.flatten(), x.shape)
+    if func == "add":
+        y = x + C_reshaped
+    elif func == "mul":
+        y = x * C_reshaped
+
+    y_expected = y.reshape(oshape)
+    # execute model
+    y_produced = oxe.execute_onnx(model, input_dict)["outp"]
+
+    y_produced = y_produced.reshape(y_expected.shape)
+
+    assert (y_produced == y_expected).all(), "cppsim failed"
+
+    if exec_mode == "rtlsim":
+        hls_synt_res_est = model.analysis(hls_synth_res_estimation)
+        assert "ChannelwiseOp_Batch_0" in hls_synt_res_est
+
+        node = model.get_nodes_by_op_type("ChannelwiseOp_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=10)
+        assert exp_cycles != 0
diff --git a/tests/fpgadataflow/test_fpgadataflow_convinputgenerator.py b/tests/fpgadataflow/test_fpgadataflow_convinputgenerator.py
index 02a9acae5e0e90d2a8dfa7d4d4afb03aa11f4239..020a2a545dadaf32c469789c90d0ea530688812c 100644
--- a/tests/fpgadataflow/test_fpgadataflow_convinputgenerator.py
+++ b/tests/fpgadataflow/test_fpgadataflow_convinputgenerator.py
@@ -27,6 +27,7 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import pytest
+import numpy as np
 
 from onnx import TensorProto, helper
 
@@ -42,6 +43,9 @@ from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
 from finn.transformation.general import GiveUniqueNodeNames
 from finn.util.basic import gen_finn_dt_tensor
 
+from finn.custom_op.registry import getCustomOp
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
 
 def make_single_im2col_modelwrapper(k, ifm_ch, ifm_dim, ofm_dim, simd, stride, idt):
     odt = idt
@@ -78,7 +82,7 @@ def make_single_im2col_modelwrapper(k, ifm_ch, ifm_dim, ofm_dim, simd, stride, i
 
 
 def make_single_slidingwindow_modelwrapper(
-    k, ifm_ch, ifm_dim, ofm_dim, simd, stride, idt
+    k, ifm_ch, ifm_dim, ofm_dim, simd, stride, idt, dw=0
 ):
     odt = idt
     inp = helper.make_tensor_value_info(
@@ -102,6 +106,7 @@ def make_single_slidingwindow_modelwrapper(
         Stride=stride,
         inputDataType=idt.name,
         outputDataType=odt.name,
+        depthwise=dw,
     )
     graph = helper.make_graph(
         nodes=[SlidingWindow_node],
@@ -126,23 +131,29 @@ def prepare_inputs(input_tensor):
 # input datatype
 @pytest.mark.parametrize("idt", [DataType.BIPOLAR, DataType.INT2])
 # kernel size
-@pytest.mark.parametrize("k", [2, 4])
+@pytest.mark.parametrize("k", [2, 3])
 # input dimension
-@pytest.mark.parametrize("ifm_dim", [4, 6, 8])
+@pytest.mark.parametrize("ifm_dim", [6, 8])
 # input channels
-@pytest.mark.parametrize("ifm_ch", [2, 4])  # , 2, 3, 4])
+@pytest.mark.parametrize("ifm_ch", [2, 4])
 # Stride
 @pytest.mark.parametrize("stride", [1, 2])
 # execution mode
 @pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
 # input channel parallelism ("SIMD")
 @pytest.mark.parametrize("simd", [1, 2])
-def test_fpgadataflow_slidingwindow(idt, k, ifm_dim, ifm_ch, stride, exec_mode, simd):
+# depthwise
+@pytest.mark.parametrize("dw", [0, 1])
+@pytest.mark.slow
+@pytest.mark.vivado
+def test_fpgadataflow_slidingwindow(
+    idt, k, ifm_dim, ifm_ch, stride, exec_mode, simd, dw
+):
     ofm_dim = int(((ifm_dim - k) / stride) + 1)
 
     x = gen_finn_dt_tensor(idt, (1, ifm_dim, ifm_dim, ifm_ch))
     model = make_single_slidingwindow_modelwrapper(
-        k, ifm_ch, ifm_dim, ofm_dim, simd, stride, idt
+        k, ifm_ch, ifm_dim, ofm_dim, simd, stride, idt, dw
     )
 
     if exec_mode == "cppsim":
@@ -166,6 +177,21 @@ def test_fpgadataflow_slidingwindow(idt, k, ifm_dim, ifm_ch, stride, exec_mode,
         k, ifm_ch, ifm_dim, ofm_dim, simd, stride, idt
     )
     y_expected = oxe.execute_onnx(golden, input_dict)["outp"]
-    # if idt == DataType.BIPOLAR:
-    #     y_expected = 2 * y_expected - 1
-    assert (y_produced == y_expected).all()
+    if dw == 0:
+        assert (y_produced == y_expected).all()
+    else:
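+        # depthwise mode: reorder the im2col reference so that all k*k window
+        # pixels of each SIMD channel group appear together, matching the
+        # output order of the generator with depthwise=1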
+        y_expected = y_expected.reshape(
+            1, ofm_dim, ofm_dim, k * k, ifm_ch // simd, simd
+        )
+        y_expected = y_expected.transpose(0, 1, 2, 4, 3, 5)
+        y_expected = y_expected.reshape(1, ofm_dim, ofm_dim, ifm_ch * k * k)
+        assert (y_produced == y_expected).all()
+
+    if exec_mode == "rtlsim":
+        node = model.get_nodes_by_op_type("ConvolutionInputGenerator")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=10)
+        assert exp_cycles != 0
diff --git a/tests/fpgadataflow/test_fpgadataflow_duplicatestreams.py b/tests/fpgadataflow/test_fpgadataflow_duplicatestreams.py
new file mode 100644
index 0000000000000000000000000000000000000000..47401ddb9546d5b32a5d36c6731981aabe0ca7cd
--- /dev/null
+++ b/tests/fpgadataflow/test_fpgadataflow_duplicatestreams.py
@@ -0,0 +1,140 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+import numpy as np
+
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.util.basic import gen_finn_dt_tensor
+from finn.custom_op.registry import getCustomOp
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
+
+def make_dupstreams_modelwrapper(ch, pe, idim, idt):
+    shape = [1, idim, idim, ch]
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, shape)
+    outp0 = helper.make_tensor_value_info("outp0", TensorProto.FLOAT, shape)
+    outp1 = helper.make_tensor_value_info("outp1", TensorProto.FLOAT, shape)
+
+    dupstrm_node = helper.make_node(
+        "DuplicateStreams_Batch",
+        ["inp"],
+        ["outp0", "outp1"],
+        domain="finn",
+        backend="fpgadataflow",
+        NumChannels=ch,
+        PE=pe,
+        inputDataType=idt.name,
+        numInputVectors=[1, idim, idim],
+    )
+    graph = helper.make_graph(
+        nodes=[dupstrm_node], name="graph", inputs=[inp], outputs=[outp0, outp1]
+    )
+
+    model = helper.make_model(graph, producer_name="addstreams-model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", idt)
+
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+
+    return model
+
+
+def prepare_inputs(input_tensor, idt):
+    return {"inp": input_tensor}
+
+
+# data type
+@pytest.mark.parametrize("idt", [DataType.INT4, DataType.UINT16])
+# channels
+@pytest.mark.parametrize("ch", [64])
+# folding
+@pytest.mark.parametrize("fold", [-1, 2, 1])
+# image dimension
+@pytest.mark.parametrize("imdim", [7])
+# execution mode
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.vivado
+def test_fpgadataflow_duplicatestreams(idt, ch, fold, imdim, exec_mode):
+    if fold == -1:
+        pe = 1
+    else:
+        pe = ch // fold
+    assert ch % pe == 0
+
+    # generate input data
+    x = gen_finn_dt_tensor(idt, (1, imdim, imdim, ch))
+
+    model = make_dupstreams_modelwrapper(ch, pe, imdim, idt)
+
+    if exec_mode == "cppsim":
+        model = model.transform(PrepareCppSim())
+        model = model.transform(CompileCppSim())
+        model = model.transform(SetExecMode("cppsim"))
+    elif exec_mode == "rtlsim":
+        model = model.transform(SetExecMode("rtlsim"))
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(PrepareIP("xc7z020clg400-1", 5))
+        model = model.transform(HLSSynthIP())
+        model = model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    # prepare input data and execute
+    input_dict = prepare_inputs(x, idt)
+    output_dict = oxe.execute_onnx(model, input_dict)
+    y0 = output_dict["outp0"]
+    y1 = output_dict["outp1"]
+    expected_y = x
+
+    assert (y0 == expected_y).all(), exec_mode + " failed"
+    assert (y1 == expected_y).all(), exec_mode + " failed"
+
+    if exec_mode == "rtlsim":
+        node = model.get_nodes_by_op_type("DuplicateStreams_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=10)
+        assert exp_cycles != 0
diff --git a/tests/fpgadataflow/test_fpgadataflow_dwc.py b/tests/fpgadataflow/test_fpgadataflow_dwc.py
index a230fb4201673e3bf0a31cf9ec82f21250fd9e40..1d83f7a23cd3bad757e772055d242799cf22b0da 100644
--- a/tests/fpgadataflow/test_fpgadataflow_dwc.py
+++ b/tests/fpgadataflow/test_fpgadataflow_dwc.py
@@ -55,6 +55,8 @@ def prepare_inputs(input_tensor, dt):
 @pytest.mark.parametrize("OUTWidth", [2, 4])
 # finn_dtype
 @pytest.mark.parametrize("finn_dtype", [DataType.BIPOLAR, DataType.INT2])
+@pytest.mark.slow
+@pytest.mark.vivado
 def test_fpgadataflow_dwc_rtlsim(Shape, INWidth, OUTWidth, finn_dtype):
 
     # generate input data
diff --git a/tests/fpgadataflow/test_fpgadataflow_fclayer.py b/tests/fpgadataflow/test_fpgadataflow_fclayer.py
index 416d96d5dbfa1125d878eb8339ae38f5d572d1ce..feff580002c317a3a8754dba2b6a9f291e408ac5 100644
--- a/tests/fpgadataflow/test_fpgadataflow_fclayer.py
+++ b/tests/fpgadataflow/test_fpgadataflow_fclayer.py
@@ -46,9 +46,7 @@ from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
 from finn.transformation.general import GiveUniqueNodeNames
 from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
 from finn.util.basic import calculate_signed_dot_prod_range, gen_finn_dt_tensor
-from finn.transformation.fpgadataflow.replace_verilog_relpaths import (
-    ReplaceVerilogRelPaths,
-)
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
 
 
 def make_single_fclayer_modelwrapper(W, pe, simd, wdt, idt, odt, T=None, tdt=None):
@@ -134,7 +132,7 @@ def prepare_inputs(input_tensor, idt, wdt):
 
 
-# mem_mode: const or decoupled
-@pytest.mark.parametrize("mem_mode", ["const", "decoupled"])
+# mem_mode: const, decoupled or external
+@pytest.mark.parametrize("mem_mode", ["const", "decoupled", "external"])
 # activation: None or DataType
 @pytest.mark.parametrize("act", [None, DataType.BIPOLAR, DataType.INT4])
 # weight datatype
@@ -149,6 +147,8 @@ def prepare_inputs(input_tensor, idt, wdt):
 @pytest.mark.parametrize("mw", [16])
 # HLS matrix height (output features)
 @pytest.mark.parametrize("mh", [16])
+@pytest.mark.slow
+@pytest.mark.vivado
 def test_fpgadataflow_fclayer_cppsim(mem_mode, idt, wdt, act, nf, sf, mw, mh):
     if nf == -1:
         nf = mh
@@ -219,7 +219,7 @@ def test_fpgadataflow_fclayer_cppsim(mem_mode, idt, wdt, act, nf, sf, mw, mh):
 
 
-# mem_mode: const or decoupled
-@pytest.mark.parametrize("mem_mode", ["const", "decoupled"])
+# mem_mode: const, decoupled or external
+@pytest.mark.parametrize("mem_mode", ["const", "decoupled", "external"])
 # activation: None or DataType
 @pytest.mark.parametrize("act", [None, DataType.BIPOLAR, DataType.INT4])
 # weight datatype
@@ -234,6 +234,8 @@ def test_fpgadataflow_fclayer_cppsim(mem_mode, idt, wdt, act, nf, sf, mw, mh):
 @pytest.mark.parametrize("mw", [16])
 # HLS matrix height (output features)
 @pytest.mark.parametrize("mh", [16])
+@pytest.mark.slow
+@pytest.mark.vivado
 def test_fpgadataflow_fclayer_rtlsim(mem_mode, idt, wdt, act, nf, sf, mw, mh):
     if nf == -1:
         nf = mh
@@ -299,7 +301,6 @@ def test_fpgadataflow_fclayer_rtlsim(mem_mode, idt, wdt, act, nf, sf, mw, mh):
     model = model.transform(GiveUniqueNodeNames())
     model = model.transform(PrepareIP("xc7z020clg400-1", 5))
     model = model.transform(HLSSynthIP())
-    model = model.transform(ReplaceVerilogRelPaths())
     model = model.transform(PrepareRTLSim())
     y_produced = oxe.execute_onnx(model, input_dict)["outp"]
     assert (y_produced.reshape(y_expected.shape) == y_expected).all(), "rtlsim failed"
@@ -307,6 +308,14 @@ def test_fpgadataflow_fclayer_rtlsim(mem_mode, idt, wdt, act, nf, sf, mw, mh):
     hls_synt_res_est = model.analysis(hls_synth_res_estimation)
     assert "StreamingFCLayer_Batch_0" in hls_synt_res_est
 
+    node = model.get_nodes_by_op_type("StreamingFCLayer_Batch")[0]
+    inst = getCustomOp(node)
+    cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+    exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+    exp_cycles = exp_cycles_dict[node.name]
+    assert np.isclose(exp_cycles, cycles_rtlsim, atol=15)
+    assert exp_cycles != 0
+
 
 # mem_mode: const or decoupled
 @pytest.mark.parametrize("mem_mode", ["decoupled"])
@@ -324,7 +333,8 @@ def test_fpgadataflow_fclayer_rtlsim(mem_mode, idt, wdt, act, nf, sf, mw, mh):
 @pytest.mark.parametrize("mw", [128])
 # HLS matrix height (output features)
 @pytest.mark.parametrize("mh", [128])
-def test_fpgadataflow_fclayer_large_depth_decoupled_mode(
+@pytest.mark.vivado
+def test_fpgadataflow_fclayer_large_depth_decoupled_mode_rtlsim(
     mem_mode, idt, wdt, act, nf, sf, mw, mh
 ):
     if nf == -1:
@@ -391,10 +401,17 @@ def test_fpgadataflow_fclayer_large_depth_decoupled_mode(
     model = model.transform(GiveUniqueNodeNames())
     model = model.transform(PrepareIP("xc7z020clg400-1", 5))
     model = model.transform(HLSSynthIP())
-    model = model.transform(ReplaceVerilogRelPaths())
     model = model.transform(PrepareRTLSim())
     y_produced = oxe.execute_onnx(model, input_dict)["outp"]
     assert (y_produced.reshape(y_expected.shape) == y_expected).all(), "rtlsim failed"
 
     hls_synt_res_est = model.analysis(hls_synth_res_estimation)
     assert "StreamingFCLayer_Batch_0" in hls_synt_res_est
+
+    node = model.get_nodes_by_op_type("StreamingFCLayer_Batch")[0]
+    inst = getCustomOp(node)
+    cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+    exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+    exp_cycles = exp_cycles_dict[node.name]
+    assert np.isclose(exp_cycles, cycles_rtlsim, atol=15)
+    assert exp_cycles != 0
diff --git a/tests/fpgadataflow/test_fpgadataflow_fifo.py b/tests/fpgadataflow/test_fpgadataflow_fifo.py
index fe27d7d4273be2b938e5bf70338bb374ce16b6b2..53de417eac175d8b700e84aecb304895a5942c16 100644
--- a/tests/fpgadataflow/test_fpgadataflow_fifo.py
+++ b/tests/fpgadataflow/test_fpgadataflow_fifo.py
@@ -2,36 +2,19 @@ import pytest
 import os
 
 from onnx import TensorProto, helper
-
 from finn.core.datatype import DataType
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
-from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
-
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
 from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
-
 from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
 from finn.transformation.general import GiveUniqueNodeNames
-
 from finn.util.basic import gen_finn_dt_tensor
-
 import finn.core.onnx_exec as oxe
-from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker
-from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
-from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
-from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
-from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject
-from finn.transformation.fpgadataflow.replace_verilog_relpaths import (
-    ReplaceVerilogRelPaths,
-)
-from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject
-from finn.util.basic import pynq_part_map
-from finn.core.throughput_test import throughput_test
 
 
 build_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
-test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
-test_fpga_part = pynq_part_map[test_pynq_board]
+test_fpga_part = "xc7z020clg400-1"
 target_clk_ns = 10
 
 
@@ -76,6 +59,8 @@ def prepare_inputs(input_tensor, dt):
 @pytest.mark.parametrize("depth", [16])
 # finn_dtype
 @pytest.mark.parametrize("finn_dtype", [DataType.BIPOLAR])  # , DataType.INT2])
+@pytest.mark.slow
+@pytest.mark.vivado
 def test_fpgadataflow_fifo_rtlsim(Shape, folded_shape, depth, finn_dtype):
 
     # generate input data
@@ -85,7 +70,6 @@ def test_fpgadataflow_fifo_rtlsim(Shape, folded_shape, depth, finn_dtype):
     model = make_single_fifo_modelwrapper(Shape, depth, folded_shape, finn_dtype)
 
     model = model.transform(SetExecMode("rtlsim"))
-    model = model.transform(InsertTLastMarker())
     model = model.transform(GiveUniqueNodeNames())
     model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))
     model = model.transform(HLSSynthIP())
@@ -96,29 +80,3 @@ def test_fpgadataflow_fifo_rtlsim(Shape, folded_shape, depth, finn_dtype):
     ).all(), """The output values are not the same as the
        input values anymore."""
     assert y.shape == tuple(Shape), """The output shape is incorrect."""
-
-    model = model.transform(ReplaceVerilogRelPaths())
-    model = model.transform(CreateStitchedIP(test_fpga_part))
-    model = model.transform(MakePYNQProject(test_pynq_board))
-    model = model.transform(SynthPYNQProject())
-    model = model.transform(MakePYNQDriver())
-    ip = os.environ["PYNQ_IP"]
-    username = os.getenv("PYNQ_USERNAME", "xilinx")
-    password = os.getenv("PYNQ_PASSWORD", "xilinx")
-    port = os.getenv("PYNQ_PORT", 22)
-    target_dir = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn")
-    model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))
-
-    res = throughput_test(model)
-    expected_dict = {}
-    expected_dict["runtime[ms]"] = []
-    expected_dict["throughput[images/s]"] = []
-    expected_dict["DRAM_in_bandwidth[Mb/s]"] = []
-    expected_dict["DRAM_out_bandwidth[Mb/s]"] = []
-    for key in expected_dict:
-        assert (
-            key in res
-        ), """Throughput test not successful, no value for {}
-        in result dictionary""".format(
-            key
-        )
diff --git a/tests/fpgadataflow/test_fpgadataflow_fmpadding.py b/tests/fpgadataflow/test_fpgadataflow_fmpadding.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef4f17998dbb09d31cdc9b3c89afafd10653fd28
--- /dev/null
+++ b/tests/fpgadataflow/test_fpgadataflow_fmpadding.py
@@ -0,0 +1,136 @@
+import pytest
+import os
+import numpy as np
+
+from onnx import TensorProto, helper
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.util.basic import gen_finn_dt_tensor
+import finn.core.onnx_exec as oxe
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.custom_op.registry import getCustomOp
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
+from finn.util.basic import pynq_part_map
+
+test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
+test_fpga_part = pynq_part_map[test_pynq_board]
+target_clk_ns = 10
+
+
+def make_single_fmpadding_modelwrapper(idim, padding, num_ch, simd, idt, pad_style):
+    assert pad_style == 2, "only pad_style == 2 supported in hlslib"
+    assert padding > 0, "Output dim should be greater than input dim"
+    odim = idim + padding
+
+    inp = helper.make_tensor_value_info(
+        "inp", TensorProto.FLOAT, [1, idim, idim, num_ch]
+    )
+    outp = helper.make_tensor_value_info(
+        "outp", TensorProto.FLOAT, [1, odim, odim, num_ch]
+    )
+
+    FMPadding = helper.make_node(
+        "FMPadding_Batch",
+        ["inp"],
+        ["outp"],
+        domain="finn",
+        backend="fpgadataflow",
+        ImgDim=idim,
+        Padding=padding,
+        NumChannels=num_ch,
+        inputDataType=str(idt.name),
+        PaddingStyle=pad_style,
+        numInputVectors=1,
+        SIMD=simd,
+    )
+
+    graph = helper.make_graph(
+        nodes=[FMPadding], name="fmpadding_graph", inputs=[inp], outputs=[outp]
+    )
+
+    model = helper.make_model(graph, producer_name="fmpadding-model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", idt)
+    model.set_tensor_datatype("outp", idt)
+
+    return model
+
+
+# input image dimension
+@pytest.mark.parametrize("idim", [8])
+# number of rows and number of cols to add
+@pytest.mark.parametrize("pad", [2, 3])
+# number of channels
+@pytest.mark.parametrize("num_ch", [2, 4])
+# Input parallelism
+@pytest.mark.parametrize("simd", [1, 2])
+# PaddingStyle: selects behavior when (odim-idim)%2 != 0
+@pytest.mark.parametrize("pad_style", [2])
+# FINN input datatype
+@pytest.mark.parametrize("idt", [DataType.INT2, DataType.INT4])
+# execution mode
+@pytest.mark.parametrize("mode", ["cppsim", "rtlsim"])
+@pytest.mark.slow
+@pytest.mark.vivado
+def test_fpgadataflow_fmpadding(idim, pad, num_ch, simd, pad_style, idt, mode):
+    if num_ch % simd != 0:
+        pytest.skip(" num_ch % simd != 0, skipping")
+    # generate input data
+    x = gen_finn_dt_tensor(idt, [1, idim, idim, num_ch])
+    input_dict = {"inp": x}
+    odim = idim + pad
+
+    model = make_single_fmpadding_modelwrapper(idim, pad, num_ch, simd, idt, pad_style)
+    model = model.transform(InferShapes())
+    model = model.transform(SetExecMode(mode))
+    model = model.transform(GiveUniqueNodeNames())
+    if mode == "cppsim":
+        model = model.transform(PrepareCppSim())
+        model = model.transform(CompileCppSim())
+    elif mode == "rtlsim":
+        model = model.transform(PrepareIP(test_fpga_part, target_clk_ns))
+        model = model.transform(HLSSynthIP())
+        model = model.transform(PrepareRTLSim())
+    y_produced = oxe.execute_onnx(model, input_dict)["outp"]
+    expected_oshape = (1, odim, odim, num_ch)
+    assert y_produced.shape == expected_oshape
+
+    # calculate reference
+    # calculate correct pad according to parameters
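+    # pad_style 2 places the extra row/column on the top/left when padding is odd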
+    if pad_style == 2:
+        if pad % 2 == 0:
+            pad_up = pad // 2
+            pad_left = pad // 2
+        else:
+            pad_up = pad // 2 + 1
+            pad_left = pad // 2 + 1
+    else:
+        pad_up = pad // 2
+        pad_left = pad // 2
+
+    pad_down = pad - pad_up
+    pad_right = pad - pad_left
+
+    y_expected = np.pad(
+        x, ((0, 0), (pad_up, pad_down), (pad_left, pad_right), (0, 0)), "constant"
+    )
+
+    assert (y_produced == y_expected).all()
+
+    if mode == "rtlsim":
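+        # compare analytical cycle estimate against the cycle count measured in rtlsim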
+        node = model.get_nodes_by_op_type("FMPadding_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=10)
+        assert exp_cycles != 0
diff --git a/tests/fpgadataflow/test_fpgadataflow_globalaccpool.py b/tests/fpgadataflow/test_fpgadataflow_globalaccpool.py
new file mode 100644
index 0000000000000000000000000000000000000000..191e00022a0b0ab11fcf4d1a476442cbd824408d
--- /dev/null
+++ b/tests/fpgadataflow/test_fpgadataflow_globalaccpool.py
@@ -0,0 +1,135 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+import numpy as np
+
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.util.basic import gen_finn_dt_tensor
+from finn.custom_op.registry import getCustomOp
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
+
+def make_accpool_modelwrapper(ch, pe, idim, idt):
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, idim, idim, ch])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, 1, 1, ch])
+
+    accpool_node = helper.make_node(
+        "GlobalAccPool_Batch",
+        ["inp"],
+        ["outp"],
+        domain="finn",
+        backend="fpgadataflow",
+        NumChannels=ch,
+        PE=pe,
+        inputDataType=idt.name,
+        numInputVectors=[1, idim, idim],
+    )
+    graph = helper.make_graph(
+        nodes=[accpool_node], name="graph", inputs=[inp], outputs=[outp]
+    )
+
+    model = helper.make_model(graph, producer_name="thresholding-model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", idt)
+
+    return model
+
+
+def prepare_inputs(input_tensor, idt):
+    return {"inp": input_tensor}
+
+
+# data type
+@pytest.mark.parametrize("idt", [DataType.UINT4, DataType.UINT16])
+# channels
+@pytest.mark.parametrize("ch", [64])
+# folding
+@pytest.mark.parametrize("fold", [-1, 2, 1])
+# image dimension
+@pytest.mark.parametrize("imdim", [7])
+# execution mode
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.vivado
+def test_fpgadataflow_globalaccpool(idt, ch, fold, imdim, exec_mode):
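+    # fold == -1 selects PE=1 (fully serial); otherwise PE = ch // fold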
+    if fold == -1:
+        pe = 1
+    else:
+        pe = ch // fold
+    assert ch % pe == 0
+
+    # generate input data
+    x = gen_finn_dt_tensor(idt, (1, imdim, imdim, ch))
+
+    model = make_accpool_modelwrapper(ch, pe, imdim, idt)
+
+    if exec_mode == "cppsim":
+        model = model.transform(PrepareCppSim())
+        model = model.transform(CompileCppSim())
+        model = model.transform(SetExecMode("cppsim"))
+    elif exec_mode == "rtlsim":
+        model = model.transform(SetExecMode("rtlsim"))
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(PrepareIP("xc7z020clg400-1", 5))
+        model = model.transform(HLSSynthIP())
+        model = model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    # prepare input data and execute
+    input_dict = prepare_inputs(x, idt)
+    y = oxe.execute_onnx(model, input_dict)["outp"]
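+    # the reference output of GlobalAccPool is the sum over the spatial (H, W) axes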
+    expected_y = np.sum(x, axis=(1, 2)).flatten()
+
+    assert (y == expected_y).all(), exec_mode + " failed"
+
+    if exec_mode == "rtlsim":
+        node = model.get_nodes_by_op_type("GlobalAccPool_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        # commented out, needs performance debug:
+        # test_fpgadataflow_globalaccpool[rtlsim-7-1-64-DataType.UINT4]
+        # assert False where False =
+        # <function isclose at 0x7eff26d5ca60>(50, 103, atol=(0.1 * 103))
+        # assert np.isclose(exp_cycles, cycles_rtlsim, atol=0.1 * cycles_rtlsim)
+        assert exp_cycles != 0
+        assert cycles_rtlsim != 0
diff --git a/tests/fpgadataflow/test_fpgadataflow_ip_stitch.py b/tests/fpgadataflow/test_fpgadataflow_ipstitch.py
similarity index 58%
rename from tests/fpgadataflow/test_fpgadataflow_ip_stitch.py
rename to tests/fpgadataflow/test_fpgadataflow_ipstitch.py
index f26ba428bf4cbe174c048dcd35a4d63dc58519ab..2e9d25778147b1aa774c56f877c35d094c62e2bc 100644
--- a/tests/fpgadataflow/test_fpgadataflow_ip_stitch.py
+++ b/tests/fpgadataflow/test_fpgadataflow_ipstitch.py
@@ -45,13 +45,22 @@ from finn.transformation.fpgadataflow.create_dataflow_partition import (
 from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
 from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker
 from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
-from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
-from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject
-from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject
-import finn.transformation.fpgadataflow.replace_verilog_relpaths as rvp
 from finn.transformation.general import GiveUniqueNodeNames
-from finn.util.basic import gen_finn_dt_tensor, pynq_part_map
+from finn.util.basic import (
+    gen_finn_dt_tensor,
+    pynq_part_map,
+    alveo_part_map,
+    alveo_default_platform,
+)
 from finn.util.fpgadataflow import pyverilate_stitched_ip
+from finn.util.test import load_test_checkpoint_or_skip
+from finn.transformation.fpgadataflow.synth_ooc import SynthOutOfContext
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.fpgadataflow.insert_iodma import InsertIODMA
+from finn.transformation.fpgadataflow.floorplan import Floorplan
+from finn.transformation.fpgadataflow.vitis_build import VitisBuild
+from finn.transformation.fpgadataflow.make_zynq_proj import ZynqBuild
+
 
 test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
 test_fpga_part = pynq_part_map[test_pynq_board]
@@ -59,7 +68,7 @@ test_fpga_part = pynq_part_map[test_pynq_board]
 ip_stitch_model_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
 
 
-def create_one_fc_model():
+def create_one_fc_model(mem_mode="const"):
     # create a model with a StreamingFCLayer instance with no activation
     # the wider range of the full accumulator makes debugging a bit easier
     wdt = DataType.INT2
@@ -92,6 +101,7 @@ def create_one_fc_model():
         ActVal=actval,
         binaryXnorMode=binary_xnor_mode,
         noActivation=no_act,
+        mem_mode=mem_mode,
     )
 
     graph = helper.make_graph(
@@ -113,7 +123,7 @@ def create_one_fc_model():
     return model
 
 
-def create_two_fc_model():
+def create_two_fc_model(mem_mode="decoupled"):
     # create a model with two StreamingFCLayer instances
     wdt = DataType.INT2
     idt = DataType.INT32
@@ -146,7 +156,7 @@ def create_two_fc_model():
         ActVal=actval,
         binaryXnorMode=binary_xnor_mode,
         noActivation=no_act,
-        mem_mode="decoupled",
+        mem_mode=mem_mode,
     )
 
     fc1 = helper.make_node(
@@ -166,7 +176,7 @@ def create_two_fc_model():
         ActVal=actval,
         binaryXnorMode=binary_xnor_mode,
         noActivation=no_act,
-        mem_mode="decoupled",
+        mem_mode=mem_mode,
     )
 
     graph = helper.make_graph(
@@ -196,15 +206,15 @@ def create_two_fc_model():
     return model
 
 
-# exec_mode of StreamingDataflowPartition
-# @pytest.mark.parametrize("exec_mode", ["remote_pynq"]) #, "rtlsim"])
-def test_fpgadataflow_ipstitch_gen_model():  # exec_mode):
-    model = create_one_fc_model()
+@pytest.mark.parametrize("mem_mode", ["const", "decoupled"])
+@pytest.mark.vivado
+def test_fpgadataflow_ipstitch_gen_model(mem_mode):
+    model = create_one_fc_model(mem_mode)
     if model.graph.node[0].op_type == "StreamingDataflowPartition":
         sdp_node = getCustomOp(model.graph.node[0])
         assert sdp_node.__class__.__name__ == "StreamingDataflowPartition"
         assert os.path.isfile(sdp_node.get_nodeattr("model"))
-        model = ModelWrapper(sdp_node.get_nodeattr("model"))
+        model = load_test_checkpoint_or_skip(sdp_node.get_nodeattr("model"))
         model.set_metadata_prop("exec_mode", "remote_pynq")
     model = model.transform(InsertTLastMarker())
     model = model.transform(GiveUniqueNodeNames())
@@ -212,15 +222,18 @@ def test_fpgadataflow_ipstitch_gen_model():  # exec_mode):
     model = model.transform(HLSSynthIP())
     assert model.graph.node[0].op_type == "StreamingFCLayer_Batch"
     assert model.graph.node[-1].op_type == "TLastMarker"
-    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_gen_model.onnx")
+    model.save(
+        ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_gen_model_%s.onnx" % mem_mode
+    )
 
 
-def test_fpgadataflow_ipstitch_do_stitch():
-    model = ModelWrapper(
-        ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_gen_model.onnx"
+@pytest.mark.parametrize("mem_mode", ["const", "decoupled"])
+@pytest.mark.vivado
+def test_fpgadataflow_ipstitch_do_stitch(mem_mode):
+    model = load_test_checkpoint_or_skip(
+        ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_gen_model_%s.onnx" % mem_mode
     )
-    model = model.transform(rvp.ReplaceVerilogRelPaths())
-    model = model.transform(CreateStitchedIP(test_fpga_part))
+    model = model.transform(CreateStitchedIP(test_fpga_part, 5))
     vivado_stitch_proj_dir = model.get_metadata_prop("vivado_stitch_proj")
     assert vivado_stitch_proj_dir is not None
     assert os.path.isdir(vivado_stitch_proj_dir)
@@ -228,43 +241,47 @@ def test_fpgadataflow_ipstitch_do_stitch():
     vivado_stitch_vlnv = model.get_metadata_prop("vivado_stitch_vlnv")
     assert vivado_stitch_vlnv is not None
     assert vivado_stitch_vlnv == "xilinx_finn:finn:finn_design:1.0"
-    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ip_stitch.onnx")
+    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ip_stitch_%s.onnx" % mem_mode)
 
 
-def test_fpgadataflow_ipstitch_rtlsim():
-    model = ModelWrapper(ip_stitch_model_dir + "/test_fpgadataflow_ip_stitch.onnx")
+@pytest.mark.parametrize("mem_mode", ["const", "decoupled"])
+@pytest.mark.vivado
+def test_fpgadataflow_ipstitch_rtlsim(mem_mode):
+    model = load_test_checkpoint_or_skip(
+        ip_stitch_model_dir + "/test_fpgadataflow_ip_stitch_%s.onnx" % mem_mode
+    )
     model.set_metadata_prop("rtlsim_trace", "whole_trace.vcd")
     sim = pyverilate_stitched_ip(model)
     exp_io = [
-        "ap_clk_0",
-        "ap_rst_n_0",
-        "in0_V_V_0_tdata",
-        "in0_V_V_0_tready",
-        "in0_V_V_0_tvalid",
-        "out_r_0_tdata",
-        "out_r_0_tkeep",
-        "out_r_0_tlast",
-        "out_r_0_tready",
-        "out_r_0_tvalid",
-        "s_axi_control_0_araddr",
-        "s_axi_control_0_arready",
-        "s_axi_control_0_arvalid",
-        "s_axi_control_0_awaddr",
-        "s_axi_control_0_awready",
-        "s_axi_control_0_awvalid",
-        "s_axi_control_0_bready",
-        "s_axi_control_0_bresp",
-        "s_axi_control_0_bvalid",
-        "s_axi_control_0_rdata",
-        "s_axi_control_0_rready",
-        "s_axi_control_0_rresp",
-        "s_axi_control_0_rvalid",
-        "s_axi_control_0_wdata",
-        "s_axi_control_0_wready",
-        "s_axi_control_0_wstrb",
-        "s_axi_control_0_wvalid",
+        "ap_clk",
+        "ap_rst_n",
+        "s_axis_0_tdata",
+        "s_axis_0_tready",
+        "s_axis_0_tvalid",
+        "m_axis_0_tdata",
+        "m_axis_0_tkeep",
+        "m_axis_0_tlast",
+        "m_axis_0_tready",
+        "m_axis_0_tvalid",
+        "s_axi_control_araddr",
+        "s_axi_control_arready",
+        "s_axi_control_arvalid",
+        "s_axi_control_awaddr",
+        "s_axi_control_awready",
+        "s_axi_control_awvalid",
+        "s_axi_control_bready",
+        "s_axi_control_bresp",
+        "s_axi_control_bvalid",
+        "s_axi_control_rdata",
+        "s_axi_control_rready",
+        "s_axi_control_rresp",
+        "s_axi_control_rvalid",
+        "s_axi_control_wdata",
+        "s_axi_control_wready",
+        "s_axi_control_wstrb",
+        "s_axi_control_wvalid",
     ]
-    assert dir(sim.io) == exp_io
+    assert sorted(dir(sim.io)) == sorted(exp_io)
     model.set_metadata_prop("exec_mode", "rtlsim")
     idt = model.get_tensor_datatype("inp")
     ishape = model.get_tensor_shape("inp")
@@ -275,81 +292,110 @@ def test_fpgadataflow_ipstitch_rtlsim():
     assert (rtlsim_res == x).all()
 
 
-def test_fpgadataflow_ipstitch_pynq_projgen():
-    model = ModelWrapper(ip_stitch_model_dir + "/test_fpgadataflow_ip_stitch.onnx")
-    model = model.transform(MakePYNQProject(test_pynq_board))
-    vivado_pynq_proj_dir = model.get_metadata_prop("vivado_pynq_proj")
-    assert vivado_pynq_proj_dir is not None
-    assert os.path.isdir(vivado_pynq_proj_dir)
-    model.save(ip_stitch_model_dir + "/test_fpgadataflow_pynq_projgen.onnx")
-
-
-def test_fpgadataflow_ipstitch_pynq_synth():
-    model = ModelWrapper(ip_stitch_model_dir + "/test_fpgadataflow_pynq_projgen.onnx")
-    model = model.transform(SynthPYNQProject())
-    bitfile = model.get_metadata_prop("vivado_pynq_bitfile")
-    assert bitfile is not None
-    assert os.path.isfile(bitfile)
-    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_pynq_synth.onnx")
-
-
-def test_fpgadataflow_ipstitch_pynq_driver():
-    model = ModelWrapper(ip_stitch_model_dir + "/test_fpgadataflow_pynq_projgen.onnx")
-    model = model.transform(MakePYNQDriver())
-    driver_dir = model.get_metadata_prop("pynq_driver_dir")
-    assert driver_dir is not None
-    assert os.path.isdir(driver_dir)
-    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_pynq_driver.onnx")
-
-
-def test_fpgadataflow_ipstitch_pynq_deployment_folder():
+@pytest.mark.parametrize("mem_mode", ["const", "decoupled"])
+@pytest.mark.vivado
+@pytest.mark.slow
+def test_fpgadataflow_ipstitch_synth_ooc(mem_mode):
+    model = load_test_checkpoint_or_skip(
+        ip_stitch_model_dir + "/test_fpgadataflow_ip_stitch_%s.onnx" % mem_mode
+    )
+    model = model.transform(SynthOutOfContext(test_fpga_part, 5))
+    ret = model.get_metadata_prop("res_total_ooc_synth")
+    assert ret is not None
+    # example expected output: (details may differ based on Vivado version etc)
+    # "{'vivado_proj_folder': ...,
+    # 'LUT': 708.0, 'FF': 1516.0, 'DSP': 0.0, 'BRAM': 0.0, 'WNS': 0.152, '': 0,
+    # 'fmax_mhz': 206.27062706270627}"
+    ret = eval(ret)
+    assert ret["LUT"] > 0
+    assert ret["FF"] > 0
+    assert ret["DSP"] == 0
+    assert ret["BRAM"] == 0
+    assert ret["fmax_mhz"] > 100
+
+
+def test_fpgadataflow_ipstitch_iodma_floorplan():
+    model = create_one_fc_model()
+    if model.graph.node[0].op_type == "StreamingDataflowPartition":
+        sdp_node = getCustomOp(model.graph.node[0])
+        assert sdp_node.__class__.__name__ == "StreamingDataflowPartition"
+        assert os.path.isfile(sdp_node.get_nodeattr("model"))
+        model = load_test_checkpoint_or_skip(sdp_node.get_nodeattr("model"))
+    model = model.transform(InferDataLayouts())
+    model = model.transform(InsertIODMA())
+    model = model.transform(Floorplan())
+    assert getCustomOp(model.graph.node[0]).get_nodeattr("partition_id") == 0
+    assert getCustomOp(model.graph.node[1]).get_nodeattr("partition_id") == 2
+    assert getCustomOp(model.graph.node[2]).get_nodeattr("partition_id") == 1
+    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_iodma_floorplan.onnx")
+
+
+# board
+@pytest.mark.parametrize("board", ["U250"])
+# clock period
+@pytest.mark.parametrize("period_ns", [5])
+# override mem_mode to external
+@pytest.mark.parametrize("extw", [True, False])
+@pytest.mark.slow
+@pytest.mark.vivado
+@pytest.mark.vitis
+def test_fpgadataflow_ipstitch_vitis(board, period_ns, extw):
+    if "VITIS_PATH" not in os.environ:
+        pytest.skip("VITIS_PATH not set")
+    platform = alveo_default_platform[board]
+    fpga_part = alveo_part_map[board]
+    model = create_two_fc_model("external" if extw else "decoupled")
+    if model.graph.node[0].op_type == "StreamingDataflowPartition":
+        sdp_node = getCustomOp(model.graph.node[0])
+        assert sdp_node.__class__.__name__ == "StreamingDataflowPartition"
+        assert os.path.isfile(sdp_node.get_nodeattr("model"))
+        model = load_test_checkpoint_or_skip(sdp_node.get_nodeattr("model"))
+    model = model.transform(VitisBuild(fpga_part, period_ns, platform))
+    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_vitis.onnx")
+    assert model.get_metadata_prop("platform") == "alveo"
+    assert os.path.isdir(model.get_metadata_prop("vitis_link_proj"))
+    assert os.path.isfile(model.get_metadata_prop("bitfile"))
+
+
+# board
+@pytest.mark.parametrize("board", ["Pynq-Z1"])
+@pytest.mark.slow
+@pytest.mark.vivado
+def test_fpgadataflow_ipstitch_zynqbuild(board):
+    model = create_two_fc_model()
+    if model.graph.node[0].op_type == "StreamingDataflowPartition":
+        sdp_node = getCustomOp(model.graph.node[0])
+        assert sdp_node.__class__.__name__ == "StreamingDataflowPartition"
+        assert os.path.isfile(sdp_node.get_nodeattr("model"))
+        model = load_test_checkpoint_or_skip(sdp_node.get_nodeattr("model"))
+    # generate inputs for remote exec
+    iname = "inp"
+    idt = model.get_tensor_datatype(iname)
+    ishape = model.get_tensor_shape(iname)
+    x = gen_finn_dt_tensor(idt, ishape)
+    # bitfile using ZynqBuild
+    model = model.transform(ZynqBuild(board, 10))
+    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_customzynq.onnx")
+
+    bitfile_name = model.get_metadata_prop("bitfile")
+    assert bitfile_name is not None
+    assert os.path.isfile(bitfile_name)
+    # deployment
     try:
         ip = os.environ["PYNQ_IP"]  # no default for this one; skip if not defined
         if ip == "":
             pytest.skip("PYNQ board IP address not specified")
-        model = ModelWrapper(
-            ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_pynq_driver.onnx"
-        )
         username = os.getenv("PYNQ_USERNAME", "xilinx")
         password = os.getenv("PYNQ_PASSWORD", "xilinx")
         port = os.getenv("PYNQ_PORT", 22)
         target_dir = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn")
         model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))
-        pynq_ip = model.get_metadata_prop("pynq_ip")
-        pynq_username = model.get_metadata_prop("pynq_username")
-        pynq_password = model.get_metadata_prop("pynq_password")
-        pynq_target_dir = model.get_metadata_prop("pynq_target_dir")
-
-        assert pynq_ip == ip
-        assert pynq_username == username
-        assert pynq_password == password
-        assert pynq_target_dir == target_dir
-
         deployment_dir = model.get_metadata_prop("pynq_deploy_dir")
         assert deployment_dir is not None
         assert os.path.isdir(deployment_dir)
-
-        model.save(
-            ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_pynq_deployment.onnx"
-        )
-    except KeyError:
-        pytest.skip("PYNQ board IP address not specified")
-
-
-def test_fpgadataflow_ipstitch_remote_execution():
-    try:
-        ip = os.environ["PYNQ_IP"]  # NOQA
-        if ip == "":
-            pytest.skip("PYNQ board IP address not specified")
-        model = ModelWrapper(
-            ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_pynq_deployment.onnx"
-        )
-        iname = "inp"
-        idt = model.get_tensor_datatype(iname)
-        ishape = model.get_tensor_shape(iname)
-        x = gen_finn_dt_tensor(idt, ishape)
-        input_dict = {"inp": x}
+        # remote exec
+        input_dict = {"global_in": x}
         outp = execute_onnx(model, input_dict)
-        assert np.isclose(outp["outp"], x).all()
+        assert np.isclose(outp["global_out"], x).all()
     except KeyError:
         pytest.skip("PYNQ board IP address not specified")
diff --git a/tests/fpgadataflow/test_fpgadataflow_labelselect.py b/tests/fpgadataflow/test_fpgadataflow_labelselect.py
new file mode 100644
index 0000000000000000000000000000000000000000..dae91b94120e94eb86bbc4b958decd581f36e671
--- /dev/null
+++ b/tests/fpgadataflow/test_fpgadataflow_labelselect.py
@@ -0,0 +1,124 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+import numpy as np
+
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.util.basic import gen_finn_dt_tensor
+from finn.util.test import soft_verify_topk
+
+
+def make_labelselect_modelwrapper(labels, pe, k, idt):
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, labels])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, k])
+
+    labelselect_node = helper.make_node(
+        "LabelSelect_Batch",
+        ["inp"],
+        ["outp"],
+        domain="finn",
+        backend="fpgadataflow",
+        Labels=labels,
+        PE=pe,
+        K=k,
+        inputDataType=idt.name,
+    )
+    graph = helper.make_graph(
+        nodes=[labelselect_node], name="graph", inputs=[inp], outputs=[outp],
+    )
+
+    model = helper.make_model(graph, producer_name="thresholding-model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", idt)
+    odt = DataType.get_smallest_possible(labels - 1)
+    model.set_tensor_datatype("outp", odt)
+
+    return model
+
+
+def prepare_inputs(input_tensor, idt):
+    return {"inp": input_tensor}
+
+
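+# input datatype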
+@pytest.mark.parametrize("idt", [DataType.UINT8, DataType.UINT16, DataType.INT16])
+# labels
+@pytest.mark.parametrize("labels", [10, 100])
+# folding
+@pytest.mark.parametrize("fold", [-1, 2, 10])
+# number of top labels to select
+@pytest.mark.parametrize("k", [1, 5])
+# execution mode
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.vivado
+def test_fpgadataflow_labelselect(idt, labels, fold, k, exec_mode):
+    np.random.seed(0)
+    if fold == -1:
+        pe = 1
+    else:
+        pe = labels // fold
+    assert labels % pe == 0
+
+    if k == -1:
+        k = labels
+
+    # generate input data
+    x = gen_finn_dt_tensor(idt, (1, labels))
+
+    model = make_labelselect_modelwrapper(labels, pe, k, idt)
+
+    if exec_mode == "cppsim":
+        model = model.transform(PrepareCppSim())
+        model = model.transform(CompileCppSim())
+        model = model.transform(SetExecMode("cppsim"))
+    elif exec_mode == "rtlsim":
+        model = model.transform(SetExecMode("rtlsim"))
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(PrepareIP("xc7z020clg400-1", 5))
+        model = model.transform(HLSSynthIP())
+        model = model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    # prepare input data and execute
+    input_dict = prepare_inputs(x, idt)
+    y = oxe.execute_onnx(model, input_dict)["outp"]
+
+    assert soft_verify_topk(x, y, k), exec_mode + " failed"
diff --git a/tests/fpgadataflow/test_fpgadataflow_res_estimate.py b/tests/fpgadataflow/test_fpgadataflow_res_estimate.py
index 38f792ed3cdd52044b28b4c19ac0603da4e502e6..398a17132a2ef6c92e600102ff5c0b71a1f65aaa 100644
--- a/tests/fpgadataflow/test_fpgadataflow_res_estimate.py
+++ b/tests/fpgadataflow/test_fpgadataflow_res_estimate.py
@@ -92,7 +92,7 @@ def test_res_estimate():
     model = model.transform(GiveUniqueNodeNames())
     prod_resource_estimation = model.analysis(res_estimation)
     expect_resource_estimation = {
-        "StreamingFCLayer_Batch_0": {"BRAM_18K": 1, "LUT": 304.4}
+        "StreamingFCLayer_Batch_0": {"BRAM_18K": 1, 'BRAM_efficiency': 0.001736111111111111, "LUT": 304.4}
     }
 
     assert check_two_dict_for_equality(
diff --git a/tests/fpgadataflow/test_fpgadataflow_thresholding.py b/tests/fpgadataflow/test_fpgadataflow_thresholding.py
new file mode 100644
index 0000000000000000000000000000000000000000..75fa625ff00ad6d367e2d6c94d98705f391fb9be
--- /dev/null
+++ b/tests/fpgadataflow/test_fpgadataflow_thresholding.py
@@ -0,0 +1,166 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+
+import numpy as np
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.analysis.fpgadataflow.hls_synth_res_estimation import hls_synth_res_estimation
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.custom_op.multithreshold import multithreshold
+from finn.transformation.fpgadataflow.prepare_ip import PrepareIP
+from finn.transformation.fpgadataflow.prepare_cppsim import PrepareCppSim
+from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
+from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
+from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
+from finn.transformation.general import GiveUniqueNodeNames
+from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
+from finn.util.basic import gen_finn_dt_tensor
+from finn.custom_op.registry import getCustomOp
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+
+
+def make_single_thresholding_modelwrapper(T, pe, idt, odt, actval):
+    NumChannels = T.shape[0]
+
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, NumChannels])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, NumChannels])
+
+    node_inp_list = ["inp", "thresh"]
+
+    Thresholding_node = helper.make_node(
+        "Thresholding_Batch",
+        node_inp_list,
+        ["outp"],
+        domain="finn",
+        backend="fpgadataflow",
+        NumChannels=NumChannels,
+        PE=pe,
+        inputDataType=idt.name,
+        outputDataType=odt.name,
+        ActVal=actval,
+    )
+    graph = helper.make_graph(
+        nodes=[Thresholding_node],
+        name="thresholding_graph",
+        inputs=[inp],
+        outputs=[outp],
+    )
+
+    model = helper.make_model(graph, producer_name="thresholding-model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", idt)
+    model.set_tensor_datatype("outp", odt)
+
+    model.set_tensor_datatype("thresh", idt)
+    model.set_initializer("thresh", T)
+    return model
+
+
+# activation: None or DataType
+@pytest.mark.parametrize("act", [DataType.INT4, DataType.BIPOLAR])
+# input datatype
+@pytest.mark.parametrize("idt", [DataType.INT16, DataType.UINT16])
+# folding, -1 is maximum possible
+@pytest.mark.parametrize("nf", [-1, 2, 1])
+# number of input features
+@pytest.mark.parametrize("ich", [16])
+# execution mode
+@pytest.mark.parametrize("exec_mode", ["cppsim", "rtlsim"])
+@pytest.mark.vivado
+@pytest.mark.slow
+def test_fpgadataflow_thresholding(idt, act, nf, ich, exec_mode):
+    if nf == -1:
+        nf = ich
+    pe = ich // nf
+    assert ich % pe == 0
+
+    # generate input data
+    x = gen_finn_dt_tensor(idt, (1, ich))
+
+    odt = act
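+    # the number of threshold steps is one less than the number of output levels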
+    n_steps = act.get_num_possible_values() - 1
+    T = np.random.randint(idt.min(), idt.max() + 1, (ich, n_steps)).astype(np.float32)
+    # provide non-decreasing thresholds
+    T = np.sort(T, axis=1)
+
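+    # ActVal offsets the raw threshold count; bipolar uses 0 (mapped to +/-1 later)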
+    if odt == DataType.BIPOLAR:
+        actval = 0
+    else:
+        actval = odt.min()
+
+    model = make_single_thresholding_modelwrapper(T, pe, idt, odt, actval)
+
+    if exec_mode == "cppsim":
+        model = model.transform(PrepareCppSim())
+        model = model.transform(CompileCppSim())
+        model = model.transform(SetExecMode("cppsim"))
+    elif exec_mode == "rtlsim":
+        model = model.transform(SetExecMode("rtlsim"))
+        model = model.transform(GiveUniqueNodeNames())
+        model = model.transform(PrepareIP("xc7z020clg400-1", 5))
+        model = model.transform(HLSSynthIP())
+        model = model.transform(PrepareRTLSim())
+    else:
+        raise Exception("Unknown exec_mode")
+
+    # package input data as dictionary
+    input_dict = {"inp": x}
+
+    y = multithreshold(x, T)
+    if act == DataType.BIPOLAR:
+        # binary to bipolar
+        y = 2 * y - 1
+    else:
+        # signed offset
+        y += act.min()
+
+    oshape = model.get_tensor_shape("outp")
+    y_expected = y.reshape(oshape)
+    # execute model
+    y_produced = oxe.execute_onnx(model, input_dict)["outp"]
+
+    y_produced = y_produced.reshape(y_expected.shape)
+
+    assert (y_produced == y_expected).all(), "cppsim failed"
+
+    if exec_mode == "rtlsim":
+        hls_synt_res_est = model.analysis(hls_synth_res_estimation)
+        assert "Thresholding_Batch_0" in hls_synt_res_est
+
+        node = model.get_nodes_by_op_type("Thresholding_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=10)
+        assert exp_cycles != 0
diff --git a/tests/fpgadataflow/test_layer_streaming_maxpool_batch.py b/tests/fpgadataflow/test_layer_streaming_maxpool_batch.py
index ac4ab33469c7720c3d7b9f30f5d13be888e1439d..d61edc86dd6b5669c334e6b7f78ea9a8550cae93 100644
--- a/tests/fpgadataflow/test_layer_streaming_maxpool_batch.py
+++ b/tests/fpgadataflow/test_layer_streaming_maxpool_batch.py
@@ -41,6 +41,9 @@ from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode
 from finn.transformation.fpgadataflow.prepare_rtlsim import PrepareRTLSim
 from finn.transformation.general import GiveUniqueNodeNames
 from finn.util.basic import gen_finn_dt_tensor
+from finn.analysis.fpgadataflow.exp_cycles_per_layer import exp_cycles_per_layer
+from finn.custom_op.registry import getCustomOp
+import numpy as np
 
 
 def make_single_maxpoolnhwc_modelwrapper(k, ifm_ch, ifm_dim, ofm_dim, idt):
@@ -121,6 +124,8 @@ def prepare_inputs(input_tensor):
 @pytest.mark.parametrize("ifm_ch", [1, 2])  # , 2, 3, 4])
 # execution mode
 @pytest.mark.parametrize("exec_mode", ["rtlsim", "cppsim"])
+@pytest.mark.slow
+@pytest.mark.vivado
 def test_fpgadataflow_streamingmaxpool(idt, k, ifm_dim, ifm_ch, exec_mode):
     stride = k
     ofm_dim = int(((ifm_dim - k) / stride) + 1)
@@ -152,3 +157,12 @@ def test_fpgadataflow_streamingmaxpool(idt, k, ifm_dim, ifm_ch, exec_mode):
     # execute model
     y_produced = oxe.execute_onnx(model, input_dict)["outp"]
     assert (y_produced == y_expected).all()
+
+    if exec_mode == "rtlsim":
+        node = model.get_nodes_by_op_type("StreamingMaxPool_Batch")[0]
+        inst = getCustomOp(node)
+        cycles_rtlsim = inst.get_nodeattr("cycles_rtlsim")
+        exp_cycles_dict = model.analysis(exp_cycles_per_layer)
+        exp_cycles = exp_cycles_dict[node.name]
+        assert np.isclose(exp_cycles, cycles_rtlsim, atol=15)
+        assert exp_cycles != 0
diff --git a/tests/transformation/streamline/test_streamline_cnv.py b/tests/transformation/streamline/test_streamline_cnv.py
index 56dcd26076ec0a5fba6e9be6acac7f5e13572c3d..82a38636e3927e17e5e2a3e8714f46082bba10e4 100644
--- a/tests/transformation/streamline/test_streamline_cnv.py
+++ b/tests/transformation/streamline/test_streamline_cnv.py
@@ -34,19 +34,23 @@ import pkg_resources as pk
 import finn.core.onnx_exec as oxe
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.fold_constants import FoldConstants
-from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
+from finn.transformation.general import (
+    RemoveUnusedTensors,
+    RemoveStaticGraphInputs,
+    GiveReadableTensorNames,
+    GiveUniqueNodeNames,
+)
 from finn.transformation.infer_shapes import InferShapes
 from finn.transformation.streamline import Streamline
 from finn.util.test import get_test_model_trained
 from finn.util.basic import make_build_dir
-from finn.transformation.double_to_single_float import DoubleToSingleFloat
 
 export_onnx_path = make_build_dir("test_streamline_cnv_")
 
 # act bits
-@pytest.mark.parametrize("abits", [1])
+@pytest.mark.parametrize("abits", [1, 2])
 # weight bits
-@pytest.mark.parametrize("wbits", [1])
+@pytest.mark.parametrize("wbits", [1, 2])
 # network topology / size
 @pytest.mark.parametrize("size", ["CNV"])
 def test_streamline_cnv(size, wbits, abits):
@@ -57,11 +61,11 @@ def test_streamline_cnv(size, wbits, abits):
     fc = get_test_model_trained(size, wbits, abits)
     bo.export_finn_onnx(fc, (1, 3, 32, 32), finn_onnx)
     model = ModelWrapper(finn_onnx)
-    model = model.transform(DoubleToSingleFloat())
     model = model.transform(InferShapes())
     model = model.transform(FoldConstants())
     model = model.transform(GiveUniqueNodeNames())
     model = model.transform(GiveReadableTensorNames())
+    model = model.transform(RemoveStaticGraphInputs())
     # load one of the test vectors
     fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz")
     input_tensor = np.load(fn)["arr_0"].astype(np.float32)
@@ -73,7 +77,11 @@ def test_streamline_cnv(size, wbits, abits):
     expected = expected_ctx[model.graph.output[0].name]
     # model.save("orig_cnv.onnx")
     model = model.transform(Streamline())
+    model = model.transform(RemoveUnusedTensors())
+    assert len(model.graph.initializer) == 21
+    assert len(model.graph.value_info) == 43
     # model.save("streamlined_cnv.onnx")
+    assert len(model.graph.node) == 23
     produced_ctx = oxe.execute_onnx(model, input_dict, True)
     produced = produced_ctx[model.graph.output[0].name]
     assert np.isclose(expected, produced, atol=1e-3).all()
diff --git a/tests/transformation/streamline/test_streamline_fc.py b/tests/transformation/streamline/test_streamline_fc.py
index c68561239b7c30973856fa282d20cd2afaa168ae..9ce98066cfbf9d1c64514b957d8a260705fd0d7c 100644
--- a/tests/transformation/streamline/test_streamline_fc.py
+++ b/tests/transformation/streamline/test_streamline_fc.py
@@ -37,7 +37,12 @@ import pytest
 import finn.core.onnx_exec as oxe
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.fold_constants import FoldConstants
-from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
+from finn.transformation.general import (
+    RemoveUnusedTensors,
+    RemoveStaticGraphInputs,
+    GiveReadableTensorNames,
+    GiveUniqueNodeNames,
+)
 from finn.transformation.infer_shapes import InferShapes
 from finn.transformation.streamline import Streamline
 from finn.util.test import get_test_model_trained
@@ -65,6 +70,7 @@ def test_streamline_fc(size, wbits, abits):
     model = model.transform(FoldConstants())
     model = model.transform(GiveUniqueNodeNames())
     model = model.transform(GiveReadableTensorNames())
+    model = model.transform(RemoveStaticGraphInputs())
     # load one of the test vectors
     raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
     input_tensor = onnx.load_tensor_from_string(raw_i)
@@ -73,6 +79,10 @@ def test_streamline_fc(size, wbits, abits):
     expected_ctx = oxe.execute_onnx(model, input_dict, True)
     expected = expected_ctx[model.graph.output[0].name]
     model = model.transform(Streamline())
+    model = model.transform(RemoveUnusedTensors())
+    assert len(model.graph.initializer) == 11
+    assert len(model.graph.value_info) == 21
+    assert len(model.graph.quantization_annotation) == 20
     produced_ctx = oxe.execute_onnx(model, input_dict, True)
     produced = produced_ctx[model.graph.output[0].name]
     assert np.isclose(expected, produced, atol=1e-3).all()
diff --git a/tests/transformation/test_absorb_mul_into_topk.py b/tests/transformation/test_absorb_mul_into_topk.py
new file mode 100644
index 0000000000000000000000000000000000000000..d0a089f9e5f894a5da635672eb58af1d8ddef3ef
--- /dev/null
+++ b/tests/transformation/test_absorb_mul_into_topk.py
@@ -0,0 +1,113 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import pytest
+
+import numpy as np
+from onnx import TensorProto, helper
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.general import GiveUniqueNodeNames, GiveReadableTensorNames
+from finn.transformation.insert_topk import InsertTopK
+from finn.transformation.streamline.absorb import AbsorbScalarMulAddIntoTopK
+import finn.core.onnx_exec as oxe
+
+# parameter to indicate if mul parameter is negative or positive
+@pytest.mark.parametrize("mul_positive", [True, False])
+# parameter to indicate if mul parameter is scalar or not
+@pytest.mark.parametrize("scalar", [True, False])
+def test_absorb_mul_into_topk(mul_positive, scalar):
+    if scalar is True:
+        shape = [1]
+    else:
+        shape = [1, 1, 1, 1000]
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, 1, 1, 1000])
+    a0 = helper.make_tensor_value_info("a0", TensorProto.FLOAT, shape)
+    b0 = helper.make_tensor_value_info("b0", TensorProto.FLOAT, [1, 1, 1, 1000])
+    c0 = helper.make_tensor_value_info("c0", TensorProto.FLOAT, shape)
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, 1, 1, 1000])
+
+    mul_node = helper.make_node("Mul", ["inp", "a0"], ["b0"])
+    add_node = helper.make_node("Add", ["b0", "c0"], ["outp"])
+    mul_graph = helper.make_graph(
+        nodes=[mul_node, add_node],
+        name="mul-graph",
+        inputs=[inp],
+        outputs=[outp],
+        value_info=[a0, b0, c0],
+    )
+
+    model = helper.make_model(mul_graph, producer_name="mul_model")
+    model = ModelWrapper(model)
+    # initialize values
+    # for mul
+    if mul_positive is True:
+        a0_values = np.random.uniform(low=0.1, high=1, size=tuple(shape)).astype(
+            np.float32
+        )
+    else:
+        a0_values = np.random.uniform(low=-1, high=-0.1, size=tuple(shape)).astype(
+            np.float32
+        )
+    model.set_initializer("a0", a0_values)
+    # for add
+    c0_values = np.random.uniform(low=-1, high=-0.1, size=tuple(shape)).astype(
+        np.float32
+    )
+    model.set_initializer("c0", c0_values)
+    model = model.transform(InsertTopK())
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+    model_transformed = model.transform(AbsorbScalarMulAddIntoTopK())
+
+    # compare execution results
+    inp_values = np.random.uniform(low=-10, high=10, size=(1, 1, 1, 1000)).astype(
+        np.float32
+    )
+    idict = {"global_in": inp_values}
+    odict = oxe.execute_onnx(model, idict, True)
+    y_indices = odict["global_out"]
+    y_values = odict["TopK_0_out0"]
+    odict = oxe.execute_onnx(model_transformed, idict, True)
+    y_tr_indices = odict["global_out"]
+    y_tr_values = odict["TopK_0_out0"]
+
+    # the indices stay the same, if the model is transformed or not
+    assert (y_indices == y_tr_indices).all()
+
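+    # only a positive scalar mul keeps the TopK ordering intact, so the
+    # transformation absorbs the mul/add only in that case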
+    if scalar is True and mul_positive is True:
+        # the values change if the model was transformed
+        assert (y_values != y_tr_values).all()
+
+        # check for new order
+        assert model.graph != model_transformed.graph
+        assert len(model.graph.node) - 2 == len(model_transformed.graph.node)
+        assert model_transformed.graph.node[0].op_type == "TopK"
diff --git a/tests/transformation/test_absorb_opposite_transposes.py b/tests/transformation/test_absorb_opposite_transposes.py
new file mode 100644
index 0000000000000000000000000000000000000000..859e691277a261f01b559e2e166763e402c5d689
--- /dev/null
+++ b/tests/transformation/test_absorb_opposite_transposes.py
@@ -0,0 +1,76 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import numpy as np
+import onnx.helper as oh
+from onnx import TensorProto
+
+import finn.core.onnx_exec as ox
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.streamline.absorb import AbsorbConsecutiveTransposes
+
+
+def test_absorb_opposite_transposes():
+    np.random.seed(0)
+    input_shape = [1, 3, 4, 2]
+    top_in = oh.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = oh.make_tensor_value_info("top_out", TensorProto.FLOAT, input_shape)
+    value_info = [oh.make_tensor_value_info("add_param_0", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("add_param_1", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("mul_param_0", TensorProto.FLOAT, [1])]
+    modelproto = oh.make_model(
+        oh.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                oh.make_node("Add", ["top_in", "add_param_0"], ["t0"]),
+                oh.make_node("Transpose", ["t0"], ["t1"], perm=[0, 2, 3, 1]),
+                oh.make_node("Transpose", ["t1"], ["t2"], perm=[0, 3, 1, 2]),
+                oh.make_node("Add", ["t2", "add_param_1"], ["t3"]),
+                oh.make_node("Transpose", ["t3"], ["t4"], perm=[0, 2, 3, 1]),
+                oh.make_node("Transpose", ["t4"], ["t5"], perm=[0, 3, 1, 2]),
+                oh.make_node("Add", ["t5", "t2"], ["t6"]),
+                oh.make_node("Mul", ["t6", "mul_param_0"], ["top_out"]),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+    model.set_initializer("add_param_0", np.asarray([1], dtype=np.float32))
+    model.set_initializer("add_param_1", np.asarray([3], dtype=np.float32))
+    model.set_initializer("mul_param_0", np.asarray([2], dtype=np.float32))
+    new_model = model.transform(AbsorbConsecutiveTransposes())
+    new_model = new_model.transform(InferShapes())
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+    assert ox.compare_execution(model, new_model, inp_dict)
+    assert len(new_model.graph.node) == 4
+    for n in new_model.graph.node:
+        assert n.op_type != "Transpose"
diff --git a/tests/transformation/test_absorb_transp_into_flatten.py b/tests/transformation/test_absorb_transp_into_flatten.py
new file mode 100644
index 0000000000000000000000000000000000000000..cbbb33b4606acf55ace662da0986105f8c456b39
--- /dev/null
+++ b/tests/transformation/test_absorb_transp_into_flatten.py
@@ -0,0 +1,99 @@
+import pytest
+
+import numpy as np
+from onnx import TensorProto, helper
+
+from finn.core.modelwrapper import ModelWrapper
+import finn.core.data_layout as DataLayout
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.general import GiveUniqueNodeNames, GiveReadableTensorNames
+from finn.transformation.streamline.absorb import AbsorbTransposeIntoFlatten
+import finn.core.onnx_exec as oxe
+
+# permutation of transpose node
+@pytest.mark.parametrize("perm", [[0, 2, 3, 1], [0, 1, 3, 2], [3, 2, 0, 1]])
+# reshape or flatten
+@pytest.mark.parametrize("shape", [None, [1, -1], [-1, 1]])
+# input shape
+@pytest.mark.parametrize("ishape", [[1, 1, 1, 4], [2, 4, 1, 1], [1, 2, 2, 4]])
+# datalayout
+@pytest.mark.parametrize("data_layout", ["NCHW", "NHWC"])
+def test_absorb_transp_into_flatten(perm, shape, ishape, data_layout):
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, ishape)
+    transp_node = helper.make_node("Transpose", ["inp"], ["transp_out"], perm=perm)
+    dummy_in = np.random.uniform(low=0, high=1, size=tuple(ishape)).astype(np.float32)
+    if shape is None:
+        shape_node = helper.make_node("Flatten", ["transp_out"], ["outp"])
+        dummy_in = dummy_in.transpose(tuple(perm))
+        oshape = dummy_in.reshape(dummy_in.shape[0], -1).shape
+        outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, oshape)
+        shape0 = None
+    else:
+        shape0 = helper.make_tensor_value_info("shape0", TensorProto.FLOAT, shape)
+        shape_node = helper.make_node("Reshape", ["transp_out", "shape0"], ["outp"])
+        oshape = dummy_in.transpose(tuple(perm)).reshape(tuple(shape)).shape
+        outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, oshape)
+
+    graph = helper.make_graph(
+        nodes=[transp_node, shape_node],
+        name="absorb-transpose-graph",
+        inputs=[inp],
+        outputs=[outp],
+    )
+
+    model = helper.make_model(graph, producer_name="absorb_transpose_model")
+    model = ModelWrapper(model)
+    if shape is not None:
+        model.graph.value_info.append(shape0)
+        model.set_initializer("shape0", np.asarray(shape))
+    if data_layout == "NCHW":
+        model.set_tensor_layout("inp", DataLayout.NCHW)
+    else:
+        model.set_tensor_layout("inp", DataLayout.NHWC)
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    model = model.transform(InferDataLayouts())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+    # model.save("test.onnx")
+    model_transformed = model.transform(AbsorbTransposeIntoFlatten())
+    # model_transformed.save("test2.onnx")
+
+    # verify transformation
+    inp_values = np.random.uniform(low=-1, high=1, size=tuple(ishape)).astype(
+        np.float32
+    )
+    idict = {model.graph.input[0].name: inp_values}
+    assert oxe.compare_execution(model, model_transformed, idict)
+
+    # only some of the parameter combinations lead to a graph that will be changed when
+    # AbsorbTransposeIntoFlatten is applied
+
+    if shape == [-1, 1]:  # not a flatten operation, so the graph will not be changed
+        assert model.graph == model_transformed.graph
+
+    elif perm == [
+        3,
+        2,
+        0,
+        1,
+    ]:  # the first dimension is also part of the transpose operation
+        # so the graph will not be changed
+        assert model.graph == model_transformed.graph
+
+    # the following cases are the ones in which the model is transformed
+    # because we have already checked the shape and perm parameters above, we only
+    # need to consider ishape and data_layout here (the transformed model should
+    # contain only a "Flatten" node)
+    elif ishape == [1, 1, 1, 4] and data_layout == "NHWC":
+        assert model_transformed.graph.node[0].op_type == "Flatten"
+
+    elif ishape == [2, 4, 1, 1] and data_layout == "NCHW" and shape is None:
+        # If the first dimension of the input tensor is not 1, flatten and
+        # reshape (with shape = [1, -1]) would lead to different results
+        assert model_transformed.graph.node[0].op_type == "Flatten"
+
+    # all other cases lead to an unchanged model
+    else:
+        assert model.graph == model_transformed.graph
diff --git a/tests/transformation/test_batchnorm_to_affine.py b/tests/transformation/test_batchnorm_to_affine.py
index 43110c6bf9e5469b2ca21ac667d7f92808017fb8..a3df5ae9bbd3f99bc29bc088a5f461122af06d81 100644
--- a/tests/transformation/test_batchnorm_to_affine.py
+++ b/tests/transformation/test_batchnorm_to_affine.py
@@ -41,7 +41,6 @@ from finn.transformation.batchnorm_to_affine import BatchNormToAffine
 from finn.transformation.fold_constants import FoldConstants
 from finn.transformation.infer_shapes import InferShapes
 from finn.util.test import get_test_model_trained
-from finn.transformation.double_to_single_float import DoubleToSingleFloat
 
 export_onnx_path = "test_output_bn2affine.onnx"
 
@@ -50,7 +49,6 @@ def test_batchnorm_to_affine_cnv_w1a1():
     lfc = get_test_model_trained("CNV", 1, 1)
     bo.export_finn_onnx(lfc, (1, 3, 32, 32), export_onnx_path)
     model = ModelWrapper(export_onnx_path)
-    model = model.transform(DoubleToSingleFloat())
     model = model.transform(InferShapes())
     model = model.transform(FoldConstants())
     fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz")
diff --git a/tests/transformation/test_change_datalayout.py b/tests/transformation/test_change_datalayout.py
new file mode 100644
index 0000000000000000000000000000000000000000..66459d574957575e61ec1bec631fb7030a27cca1
--- /dev/null
+++ b/tests/transformation/test_change_datalayout.py
@@ -0,0 +1,112 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import pytest
+from onnx import helper, TensorProto
+
+from finn.custom_op.maxpoolnhwc import compute_pool_output_dim
+from finn.core.modelwrapper import ModelWrapper
+from finn.core.datatype import DataType
+import finn.core.data_layout as DataLayout
+from finn.transformation.change_datalayout import ChangeDataLayoutQuantAvgPool2d
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.general import GiveUniqueNodeNames, GiveReadableTensorNames
+from finn.util.basic import gen_finn_dt_tensor
+from finn.util.basic import get_by_name
+import finn.core.onnx_exec as oxe
+
+# stride
+@pytest.mark.parametrize("s", [1, 2])
+# kernel
+@pytest.mark.parametrize("k", [3, 4])
+# ibits
+@pytest.mark.parametrize("ibits", [4, 8])
+# obits
+@pytest.mark.parametrize("obits", [2, 4])
+# signed
+@pytest.mark.parametrize("signed", [False, True])
+# channels
+@pytest.mark.parametrize("c", [2, 3])
+# input dimension
+@pytest.mark.parametrize("idim", [6, 7])
+def test_change_datalayout_quantavgpool(s, k, ibits, obits, signed, c, idim):
+    n = 1
+    odim = compute_pool_output_dim(idim, k, s)
+    # determine input FINN datatype
+    if signed is True:
+        prefix = "INT"
+    else:
+        prefix = "UINT"
+    dt_name = prefix + str(ibits)
+    dtype = DataType[dt_name]
+
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [n, c, idim, idim])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [n, c, odim, odim])
+
+    node = helper.make_node(
+        "QuantAvgPool2d",
+        ["inp"],
+        ["outp"],
+        domain="finn",
+        stride=s,
+        kernel=k,
+        ibits=ibits,
+        obits=obits,
+        signed=signed,
+        data_layout="NCHW",
+    )
+    graph = helper.make_graph(
+        nodes=[node], name="single-quantavgpool", inputs=[inp], outputs=[outp]
+    )
+
+    model = helper.make_model(graph)
+    model = ModelWrapper(model)
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    model = model.transform(InferDataLayouts())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+    model_transformed = model.transform(ChangeDataLayoutQuantAvgPool2d())
+    model_transformed = model_transformed.transform(InferShapes())
+    model_transformed = model_transformed.transform(InferDataTypes())
+    model_transformed = model_transformed.transform(InferDataLayouts())
+    model_transformed = model_transformed.transform(GiveUniqueNodeNames())
+    model_transformed = model_transformed.transform(GiveReadableTensorNames())
+    inp_values = gen_finn_dt_tensor(dtype, [n, c, idim, idim])
+    idict = {"inp": inp_values}
+    assert oxe.compare_execution(model, model_transformed, idict)
+    assert len(model.graph.node) + 2 == len(model_transformed.graph.node)
+    assert model_transformed.graph.node[-1].op_type == "Transpose"
+    assert model_transformed.graph.node[0].op_type == "Transpose"
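+    # note: the two extra nodes are an NCHW->NHWC Transpose inserted before the
+    # QuantAvgPool2d and an NHWC->NCHW Transpose inserted after it, so the model
+    # interface stays NCHW while the pooling itself now runs in NHWC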
+    # check if QuantAvgPool2d node has datalayout set correctly
+    node = model_transformed.graph.node[1]
+    d_layout = get_by_name(node.attribute, "data_layout").s.decode("UTF-8")
+    assert d_layout == "NHWC"
+    assert model_transformed.get_tensor_layout(node.input[0]) == DataLayout.NHWC
+    assert model_transformed.get_tensor_layout(node.output[0]) == DataLayout.NHWC
diff --git a/tests/transformation/test_collapse_repeated_op.py b/tests/transformation/test_collapse_repeated_op.py
index 01d932ece0be4b0beb7ad6094284ec3efb1e525e..b74d868f9b921c35ff9f596c811583f45f761374 100644
--- a/tests/transformation/test_collapse_repeated_op.py
+++ b/tests/transformation/test_collapse_repeated_op.py
@@ -34,6 +34,7 @@ import finn.core.onnx_exec as ox
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.infer_shapes import InferShapes
 from finn.transformation.streamline import CollapseRepeatedAdd, CollapseRepeatedMul
+import pytest
 
 
 def test_collapse_repeated_op():
@@ -67,3 +68,60 @@ def test_collapse_repeated_op():
     new_model = new_model.transform(CollapseRepeatedMul())
     inp_dict = {"top_in": np.asarray([-1.0, 1.0], dtype=np.float32)}
     assert ox.compare_execution(model, new_model, inp_dict)
+    assert len(new_model.graph.node) == 2
+    assert new_model.graph.node[0].op_type == "Add"
+    assert new_model.graph.node[1].op_type == "Mul"
+
+
+@pytest.mark.parametrize(
+    "test_args", [("Add", CollapseRepeatedAdd()), ("Mul", CollapseRepeatedMul())],
+)
+def test_collapse_repeated_only_if_linear(test_args):
+    scalar_op = test_args[0]
+    transf_fxn = test_args[1]
+
+    input_shape = [4, 4]
+    output_shape = input_shape
+
+    top_in = oh.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = oh.make_tensor_value_info("top_out", TensorProto.FLOAT, output_shape)
+
+    value_info = [oh.make_tensor_value_info("p1", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("p2", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("p3", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("p4", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("p5", TensorProto.FLOAT, [1])]
+
+    modelproto = oh.make_model(
+        oh.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                oh.make_node(scalar_op, ["top_in", "p2"], ["t1"]),
+                oh.make_node(scalar_op, ["t1", "p1"], ["t2"]),
+                oh.make_node(scalar_op, ["t2", "p3"], ["t3"]),
+                oh.make_node(scalar_op, ["t2", "p4"], ["t4"]),
+                oh.make_node(scalar_op, ["t3", "t4"], ["t5"]),
+                oh.make_node(scalar_op, ["t5", "p5"], ["top_out"]),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    np.random.seed(0)
+    model.set_initializer("p1", *np.random.rand(1).astype(np.float32))
+    model.set_initializer("p2", *np.random.rand(1).astype(np.float32))
+    model.set_initializer("p3", *np.random.rand(1).astype(np.float32))
+    model.set_initializer("p4", *np.random.rand(1).astype(np.float32))
+    model.set_initializer("p5", *np.random.rand(1).astype(np.float32))
+
+    # Transform
+    new_model = model.transform(transf_fxn)
+
+    # Test
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+    assert ox.compare_execution(model, new_model, inp_dict)
+    assert len(new_model.graph.node) == 5
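+    # note: only the first two ops (top_in -> t1 -> t2) form a linear chain with
+    # constant parameters; t2 fans out to two consumers and the joining op has no
+    # constant input, so exactly one pair is expected to collapse (6 -> 5 nodes)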
diff --git a/tests/transformation/test_conv_lowering.py b/tests/transformation/test_conv_lowering.py
index 2cbc8e558940517168678b05c3bb46af8170abce..b6ab634b374dea3ba309bbf12654c73c0a90e36c 100644
--- a/tests/transformation/test_conv_lowering.py
+++ b/tests/transformation/test_conv_lowering.py
@@ -26,28 +26,32 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+import pytest
+import onnx.helper as oh
+from onnx import TensorProto
 import os
 import pkg_resources as pk
 import brevitas.onnx as bo
 import numpy as np
 
-
 from finn.core.modelwrapper import ModelWrapper
+from finn.core.datatype import DataType
 from finn.transformation.fold_constants import FoldConstants
 from finn.transformation.infer_shapes import InferShapes
 from finn.util.test import get_test_model_trained
 from finn.transformation.lower_convs_to_matmul import LowerConvsToMatMul
-from finn.transformation.double_to_single_float import DoubleToSingleFloat
 import finn.core.onnx_exec as oxe
+from finn.custom_op.im2col import compute_conv_output_dim
+from finn.util.basic import gen_finn_dt_tensor
+from finn.custom_op.registry import getCustomOp
 
-export_onnx_path = "test_output_cnv.onnx"
+export_onnx_path = "test_conv_lowering.onnx"
 
 
 def test_conv_lowering_cnv_w1a1():
     cnv = get_test_model_trained("CNV", 1, 1)
     bo.export_finn_onnx(cnv, (1, 3, 32, 32), export_onnx_path)
     model = ModelWrapper(export_onnx_path)
-    model = model.transform(DoubleToSingleFloat())
     model = model.transform(InferShapes())
     model = model.transform(FoldConstants())
     fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz")
@@ -65,3 +69,121 @@ def test_conv_lowering_cnv_w1a1():
     assert np.isclose(produced, expected).all()
     assert np.argmax(produced) == 3
     os.remove(export_onnx_path)
+
+
+# input datatype
+@pytest.mark.parametrize("idt", [DataType.INT2, DataType.INT4])
+# kernel size
+@pytest.mark.parametrize("k", [2, 4])
+# input dimension
+@pytest.mark.parametrize("ifm_dim", [4, 6])
+# input channels
+@pytest.mark.parametrize("ifm_ch", [2, 3])
+# stride
+@pytest.mark.parametrize("stride", [1, 2])
+# padding
+@pytest.mark.parametrize("padding", [[0, 0, 0, 0], [1, 1, 1, 1]])
+def test_depthwise_conv_lowering(idt, k, ifm_dim, ifm_ch, stride, padding):
+    wdt = idt
+    odt = DataType.INT32
+    ofm_ch = ifm_ch
+    ofm_dim = compute_conv_output_dim(ifm_dim, k, stride, pad=padding[0])
+
+    # set up onnx model
+    inp = oh.make_tensor_value_info(
+        "inp", TensorProto.FLOAT, [1, ifm_ch, ifm_dim, ifm_dim]
+    )
+    outp = oh.make_tensor_value_info(
+        "outp", TensorProto.FLOAT, [1, ofm_ch, ofm_dim, ofm_dim]
+    )
+
+    W = oh.make_tensor_value_info("W", TensorProto.FLOAT, [ofm_ch, 1, k, k])
+
+    dw_cnv = oh.make_node(
+        "Conv",
+        inputs=["inp", "W"],
+        outputs=["outp"],
+        kernel_shape=[k, k],
+        pads=padding,
+        strides=[stride, stride],
+        group=ifm_ch,
+    )
+    graph = oh.make_graph(
+        nodes=[dw_cnv],
+        name="dw_cnv_graph",
+        inputs=[inp],
+        outputs=[outp],
+        value_info=[W],
+    )
+
+    model = oh.make_model(graph, producer_name="dws_cnv-model")
+    model = ModelWrapper(model)
+    model.set_tensor_datatype("inp", idt)
+    model.set_tensor_datatype("outp", odt)
+    model.set_tensor_datatype("W", wdt)
+    w_tensor = gen_finn_dt_tensor(wdt, [ofm_ch, 1, k, k])
+    model.set_initializer("W", w_tensor)
+    model = model.transform(InferShapes())
+
+    input_tensor = gen_finn_dt_tensor(idt, [1, ifm_ch, ifm_dim, ifm_dim])
+    input_dict = {"inp": input_tensor}
+    output_dict = oxe.execute_onnx(model, input_dict)
+    expected = output_dict["outp"]
+
+    model = model.transform(LowerConvsToMatMul())
+    output_dict = oxe.execute_onnx(model, input_dict)
+    produced = output_dict["outp"]
+    assert (produced == expected).all()
+
+    # check if created nodes have attributes that indicate depthwise conv
+    assert model.get_tensor_sparsity("W") is not None
+    im2col_node = getCustomOp(model.graph.node[1])
+    assert im2col_node.get_nodeattr("depthwise") == 1
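+    # note: for a depthwise Conv (group == ifm_ch) the lowering marks the weight
+    # tensor as sparse and flags the Im2Col node as depthwise, presumably so that
+    # later convert-to-HLS steps can pick a depthwise implementation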
+
+
+def test_conv_lowering_conv_1x1():
+    np.random.seed(0)
+
+    in_feature_dim = 7
+    in_chn = 3
+    kernel_size = 1
+    out_feature_dim = in_feature_dim
+
+    input_shape = [1, in_chn, in_feature_dim, in_feature_dim]
+    output_shape = [1, in_chn, out_feature_dim, out_feature_dim]
+
+    conv_param_shape = [in_chn, in_chn, kernel_size, kernel_size]
+
+    conv_config = {}
+    conv_config["dilations"] = [1, 1]
+    conv_config["group"] = 1
+    conv_config["kernel_shape"] = [kernel_size, kernel_size]
+    conv_config["pads"] = [0, 0, 0, 0]
+    conv_config["strides"] = [1, 1]
+
+    top_in = oh.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = oh.make_tensor_value_info("top_out", TensorProto.FLOAT, output_shape)
+
+    value_info = [oh.make_tensor_value_info("p1", TensorProto.FLOAT, conv_param_shape)]
+
+    modelproto = oh.make_model(
+        oh.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[oh.make_node("Conv", ["top_in", "p1"], ["top_out"], **conv_config)],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+    model.set_initializer("p1", np.random.rand(*conv_param_shape).astype(np.float32))
+
+    new_model = model.transform(LowerConvsToMatMul())
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+
+    assert oxe.compare_execution(model, new_model, inp_dict)
+    assert new_model.graph.node[0].op_type == "Transpose"
+    assert new_model.graph.node[1].op_type == "MatMul"
+    assert new_model.graph.node[2].op_type == "Transpose"
+    assert len(new_model.graph.node) == 3
diff --git a/tests/transformation/test_fold_constants.py b/tests/transformation/test_fold_constants.py
index 685c14a98b9031096aaf5b244c4f484d4f308bca..a976ffd62bce744a474a6fac2a61a6478526777f 100644
--- a/tests/transformation/test_fold_constants.py
+++ b/tests/transformation/test_fold_constants.py
@@ -40,7 +40,7 @@ from finn.transformation.fold_constants import FoldConstants
 from finn.transformation.infer_shapes import InferShapes
 from finn.util.test import get_test_model_untrained
 
-export_onnx_path = "test_output_lfc.onnx"
+export_onnx_path = "test_fold_constants.onnx"
 
 
 def test_const_folding():
diff --git a/tests/transformation/test_general_transformation.py b/tests/transformation/test_general_transformation.py
index 33b6041a170f3c0de8f741ef3ecb28682f6429ea..153af378eb3e07d5824f114fd194730048fb4953 100644
--- a/tests/transformation/test_general_transformation.py
+++ b/tests/transformation/test_general_transformation.py
@@ -31,6 +31,12 @@ from pkgutil import get_data
 from finn.core.modelwrapper import ModelWrapper
 from finn.transformation.general import GiveUniqueNodeNames
 
+import numpy as np
+import onnx
+import finn.core.onnx_exec as oxe
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.general import GiveUniqueParameterTensors
+
 
 def test_give_unique_node_names():
     raw_m = get_data("finn", "data/onnx/mnist-conv/model.onnx")
@@ -39,3 +45,76 @@ def test_give_unique_node_names():
     assert model.graph.node[0].name == "Reshape_0"
     assert model.graph.node[1].name == "Conv_0"
     assert model.graph.node[11].name == "Add_2"
+
+
+def test_give_unique_parameter_tensors():
+
+    # Create model
+    input_shape = [4, 4]
+    in1 = onnx.helper.make_tensor_value_info("in1", onnx.TensorProto.FLOAT, input_shape)
+    out1 = onnx.helper.make_tensor_value_info(
+        "out1", onnx.TensorProto.FLOAT, input_shape
+    )
+
+    graph_nodes = []
+    graph_nodes += [
+        onnx.helper.make_node("Add", inputs=["in1", "param1"], outputs=["t1"])
+    ]
+
+    graph_nodes += [
+        onnx.helper.make_node("Sum", inputs=["t1", "param1", "param1"], outputs=["t2"])
+    ]
+
+    graph_nodes += [
+        onnx.helper.make_node("Sum", inputs=["t2", "param2", "param1"], outputs=["t3"])
+    ]
+
+    graph_nodes += [
+        onnx.helper.make_node("Add", inputs=["t3", "param1"], outputs=["out1"])
+    ]
+
+    onnx_graph = onnx.helper.make_graph(
+        nodes=graph_nodes, name="simple_graph", inputs=[in1], outputs=[out1],
+    )
+
+    onnx_model = onnx.helper.make_model(onnx_graph, producer_name="simple-model")
+    model = ModelWrapper(onnx_model)
+
+    # Set param values
+    np.random.seed(0)
+    param1 = np.random.rand(*input_shape).astype(np.float32)
+    param2 = np.random.rand(*input_shape).astype(np.float32)
+    model.set_initializer("param1", param1)
+    model.set_initializer("param2", param2)
+    model = model.transform(InferShapes())
+
+    # Apply transformation
+    new_model = model.transform(GiveUniqueParameterTensors())
+    new_model = new_model.transform(InferShapes())
+
+    # Test: check that the transformation does not change the model's behavior
+    input_tensor = np.random.rand(*input_shape).astype(np.float32)
+    input_dict = {"in1": input_tensor}
+
+    # run original
+    expected_context = oxe.execute_onnx(model, input_dict)
+    expected_output = expected_context[model.graph.output[0].name]
+
+    # run modified
+    produced_context = oxe.execute_onnx(new_model, input_dict)
+    produced_output = produced_context[new_model.graph.output[0].name]
+
+    assert np.isclose(
+        expected_output, produced_output, atol=1e-8
+    ).all(), " GiveUniqueParameterTensors() transform breaks the model"
+
+    # Does the job? i.e. are parameter tensors no longer shared between nodes?
+    param_set = set()
+    param_cnt = 0
+    for n in new_model.graph.node:
+        for i in range(1, len(n.input)):
+            param_set |= {n.input[i]}
+            param_cnt += 1
+
+    assert len(param_set) == param_cnt, " There are still parameters reused"
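+    # note: the loop above treats every input of a node past the first one as a
+    # parameter tensor; if all collected names are distinct, no parameter tensor
+    # is shared between nodes anymore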
diff --git a/tests/transformation/test_infer_data_layouts.py b/tests/transformation/test_infer_data_layouts.py
new file mode 100644
index 0000000000000000000000000000000000000000..0bc30ea0eb48087606545c86e705328217b004ca
--- /dev/null
+++ b/tests/transformation/test_infer_data_layouts.py
@@ -0,0 +1,111 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+
+import brevitas.onnx as bo
+import finn.transformation.streamline.absorb as absorb
+from finn.transformation.streamline.reorder import MakeMaxPoolNHWC
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fold_constants import FoldConstants
+from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.streamline import Streamline
+from finn.util.test import get_test_model_trained
+from finn.transformation.lower_convs_to_matmul import LowerConvsToMatMul
+from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount
+import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
+from finn.transformation.infer_data_layouts import InferDataLayouts
+import finn.core.data_layout as DataLayout
+
+export_onnx_path_cnv = "test_infer_data_layouts.onnx"
+
+
+def test_infer_data_layouts():
+    cnv = get_test_model_trained("CNV", 1, 1)
+    bo.export_finn_onnx(cnv, (1, 3, 32, 32), export_onnx_path_cnv)
+    model = ModelWrapper(export_onnx_path_cnv)
+    model = model.transform(InferShapes())
+    model = model.transform(FoldConstants())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+    model = model.transform(Streamline())
+    model = model.transform(InferDataLayouts())
+
+    assert model.get_tensor_layout("global_in") == DataLayout.NCHW
+    assert model.get_tensor_layout("Conv_0_out0") == DataLayout.NCHW
+    assert model.get_tensor_layout("MaxPool_0_out0") == DataLayout.NCHW
+    assert model.get_tensor_layout("MultiThreshold_6_out0") == DataLayout.NCHW
+    assert model.get_tensor_layout("Reshape_0_out0") == DataLayout.NC
+    assert model.get_tensor_layout("MatMul_0_out0") == DataLayout.NC
+    assert model.get_tensor_layout("global_out") == DataLayout.NC
+
+    model = model.transform(LowerConvsToMatMul())
+    model = model.transform(MakeMaxPoolNHWC())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+    model = model.transform(InferDataLayouts())
+
+    assert model.get_tensor_layout("global_in") == DataLayout.NCHW
+    assert model.get_tensor_layout("Transpose_0_out0") == DataLayout.NHWC
+    assert model.get_tensor_layout("Im2Col_0_out0") == DataLayout.NHWC
+    # note: im2col output isn't really NHWC or any other common layout
+    # since the concept of channels changes with lowering... but it is
+    # conceptually close to NHWC since the innermost dim gets multiplied
+    assert model.get_tensor_layout("MatMul_0_out0") == DataLayout.NHWC
+    assert model.get_tensor_layout("Transpose_1_out0") == DataLayout.NCHW
+    assert model.get_tensor_layout("Transpose_2_out0") == DataLayout.NHWC
+    assert model.get_tensor_layout("MaxPoolNHWC_0_out0") == DataLayout.NHWC
+    assert model.get_tensor_layout("Reshape_0_out0") == DataLayout.NC
+    assert model.get_tensor_layout("global_out") == DataLayout.NC
+
+    model = model.transform(absorb.AbsorbTransposeIntoMultiThreshold())
+    model = model.transform(ConvertBipolarMatMulToXnorPopcount())
+    model = model.transform(Streamline())
+    model = model.transform(to_hls.InferBinaryStreamingFCLayer())
+    model = model.transform(to_hls.InferQuantizedStreamingFCLayer())
+    model = model.transform(to_hls.InferConvInpGen())
+    model = model.transform(to_hls.InferStreamingMaxPool())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+    model = model.transform(InferDataLayouts())
+
+    assert model.get_tensor_layout("global_in") == DataLayout.NCHW
+    assert model.get_tensor_layout("Transpose_0_out0") == DataLayout.NHWC
+    # note: im2col output isn't really NHWC or any other common layout
+    # since the concept of channels changes with lowering... but it is
+    # conceptually close to NHWC since the innermost dim gets multiplied
+    assert (
+        model.get_tensor_layout("ConvolutionInputGenerator_0_out0") == DataLayout.NHWC
+    )
+    assert model.get_tensor_layout("StreamingFCLayer_Batch_3_out0") == DataLayout.NHWC
+    assert model.get_tensor_layout("Reshape_0_out0") == DataLayout.NC
+    assert model.get_tensor_layout("StreamingFCLayer_Batch_6_out0") == DataLayout.NC
+    assert model.get_tensor_layout("global_out") == DataLayout.NC
+
+    os.remove(export_onnx_path_cnv)
diff --git a/tests/transformation/test_infer_datatypes.py b/tests/transformation/test_infer_datatypes.py
index ae8a52882a9126470dad6ca15d8c35000a8edaff..097ae03f6153843fbb7956a72b38431559d5d0f1 100644
--- a/tests/transformation/test_infer_datatypes.py
+++ b/tests/transformation/test_infer_datatypes.py
@@ -38,11 +38,7 @@ from finn.transformation.infer_datatypes import InferDataTypes
 from finn.transformation.infer_shapes import InferShapes
 from finn.util.test import get_test_model_trained
 
-export_onnx_path = "test_output_lfc.onnx"
-# TODO get from config instead, hardcoded to Docker path for now
-trained_lfc_checkpoint = (
-    "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar"
-)
+export_onnx_path = "test_infer_datatypes.onnx"
 
 
 def test_infer_datatypes():
@@ -58,8 +54,8 @@ def test_infer_datatypes():
     assert model.get_tensor_datatype("MatMul_1_out0") == DataType.INT32
     assert model.get_tensor_datatype("MatMul_2_out0") == DataType.INT32
     assert model.get_tensor_datatype("MatMul_3_out0") == DataType.INT32
-    assert model.get_tensor_datatype("Sign_0_out0") == DataType.BIPOLAR
-    assert model.get_tensor_datatype("Sign_1_out0") == DataType.BIPOLAR
-    assert model.get_tensor_datatype("Sign_2_out0") == DataType.BIPOLAR
-    assert model.get_tensor_datatype("Sign_3_out0") == DataType.BIPOLAR
+    assert model.get_tensor_datatype("MultiThreshold_0_out0") == DataType.BIPOLAR
+    assert model.get_tensor_datatype("MultiThreshold_1_out0") == DataType.BIPOLAR
+    assert model.get_tensor_datatype("MultiThreshold_2_out0") == DataType.BIPOLAR
+    assert model.get_tensor_datatype("MultiThreshold_3_out0") == DataType.BIPOLAR
     os.remove(export_onnx_path)
diff --git a/tests/transformation/test_linear_past_eltwise.py b/tests/transformation/test_linear_past_eltwise.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5af2307fb042879a837a26c50715c8ec1b96963
--- /dev/null
+++ b/tests/transformation/test_linear_past_eltwise.py
@@ -0,0 +1,199 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import numpy as np
+
+from onnx import TensorProto, helper
+
+import finn.core.onnx_exec as oxe
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.fold_constants import FoldConstants
+from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
+from finn.transformation.streamline.reorder import MoveLinearPastEltwiseAdd
+from finn.transformation.infer_shapes import InferShapes
+
+import pytest
+
+export_onnx_path = "test_linear_past_eltwise.onnx"
+
+# construct a synthetic graph to test MoveLinearPastEltwiseAdd:
+# two branches, each a scalar Add followed by a scalar Mul, joined by an
+# elementwise Add, so the overall graph just computes a scaled sum
+
+
+def make_model(shape):
+    inp1 = helper.make_tensor_value_info("inp1", TensorProto.FLOAT, shape)
+    inp2 = helper.make_tensor_value_info("inp2", TensorProto.FLOAT, shape)
+    inp1_add = helper.make_tensor_value_info("inp1_add", TensorProto.FLOAT, shape)
+    inp1_add_ct = helper.make_tensor_value_info("inp1_add_ct", TensorProto.FLOAT, [1])
+    inp2_add = helper.make_tensor_value_info("inp2_add", TensorProto.FLOAT, shape)
+    inp2_add_ct = helper.make_tensor_value_info("inp2_add_ct", TensorProto.FLOAT, [1])
+    inp1_mul = helper.make_tensor_value_info("inp1_mul", TensorProto.FLOAT, shape)
+    inp1_mul_ct = helper.make_tensor_value_info("inp1_mul_ct", TensorProto.FLOAT, [1])
+    inp2_mul = helper.make_tensor_value_info("inp2_mul", TensorProto.FLOAT, shape)
+    inp2_mul_ct = helper.make_tensor_value_info("inp2_mul_ct", TensorProto.FLOAT, [1])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, shape)
+
+    add1_node = helper.make_node("Add", [inp1.name, inp1_add_ct.name], [inp1_add.name])
+    add2_node = helper.make_node("Add", [inp2.name, inp2_add_ct.name], [inp2_add.name])
+    mul1_node = helper.make_node(
+        "Mul", [inp1_add.name, inp1_mul_ct.name], [inp1_mul.name]
+    )
+    mul2_node = helper.make_node(
+        "Mul", [inp2_add.name, inp2_mul_ct.name], [inp2_mul.name]
+    )
+    eltwise_add_node = helper.make_node(
+        "Add", [inp1_mul.name, inp2_mul.name], [outp.name]
+    )
+    graph = helper.make_graph(
+        nodes=[add1_node, add2_node, mul1_node, mul2_node, eltwise_add_node],
+        name="graph",
+        inputs=[inp1, inp2],
+        outputs=[outp],
+    )
+
+    model = helper.make_model(graph, producer_name="add-model")
+    model = ModelWrapper(model)
+
+    # set initializers for scalar add/mul nodes
+    model.set_initializer(add1_node.input[1], np.array([7.0]))
+    model.set_initializer(add2_node.input[1], np.array([8.0]))
+    model.set_initializer(mul1_node.input[1], np.array([3.0]))
+    model.set_initializer(mul2_node.input[1], np.array([3.0]))
+
+    return model
+
+
+# channels
+@pytest.mark.parametrize("ch", [64])
+# ifmdim
+@pytest.mark.parametrize("ifmdim", [-1, 7])
+def test_linear_past_eltwise_add(ch, ifmdim):
+    # generate test vectors of correct shape
+    if ifmdim == -1:
+        input_tensor_shape = (1, ch)
+    else:
+        input_tensor_shape = (1, ch, ifmdim, ifmdim)
+
+    model = make_model(input_tensor_shape)
+    model.save(export_onnx_path)
+    model = ModelWrapper(export_onnx_path)
+    model = model.transform(InferShapes())
+    model = model.transform(FoldConstants())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+
+    x1 = np.random.randn(*input_tensor_shape).astype(np.float32)
+    x2 = np.random.randn(*input_tensor_shape).astype(np.float32)
+
+    # generate expected value from streamlined net
+    input_dict = {model.graph.input[0].name: x1, model.graph.input[1].name: x2}
+
+    output_dict = oxe.execute_onnx(model, input_dict, True)
+    produced_sum = output_dict[model.graph.output[0].name]
+    expected_sum = 3.0 * ((x1 + x2) + 15.0)
+    assert np.isclose(expected_sum, produced_sum, atol=1e-3).all()
+    assert len(model.get_nodes_by_op_type("Add")) == 3
+    assert len(model.get_nodes_by_op_type("Mul")) == 2
+
+    model = model.transform(MoveLinearPastEltwiseAdd())
+
+    # verify again, to check we didn't break anything
+    output_dict = oxe.execute_onnx(model, input_dict, True)
+    produced_sum = output_dict[model.graph.output[0].name]
+    assert np.isclose(expected_sum, produced_sum, atol=1e-3).all()
+    assert len(model.get_nodes_by_op_type("Add")) == 2
+    assert len(model.get_nodes_by_op_type("Mul")) == 1
+
+    os.remove(export_onnx_path)
+
+
+@pytest.mark.parametrize("ch", [64, 1])
+# ifmdim
+@pytest.mark.parametrize("ifmdim", [-1, 7])
+def test_linear_past_eltwise_add_multiple_forks(ch, ifmdim):
+    # generate test vectors of correct shape
+    if ifmdim == -1:
+        input_shape = (1, ch)
+    else:
+        input_shape = (1, ch, ifmdim, ifmdim)
+
+    top_in = helper.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = helper.make_tensor_value_info("top_out", TensorProto.FLOAT, input_shape)
+
+    num_of_params = 6
+    value_info = []
+    for i in range(num_of_params):
+        value_info += [
+            helper.make_tensor_value_info("p" + str(i), TensorProto.FLOAT, input_shape)
+        ]
+
+    modelproto = helper.make_model(
+        helper.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                helper.make_node("Add", ["top_in", "p0"], ["fork1"]),
+                helper.make_node("Mul", ["fork1", "p1"], ["t2"]),
+                helper.make_node("Mul", ["fork1", "p2"], ["t3"]),
+                helper.make_node("Add", ["t2", "t3"], ["t4"]),
+                helper.make_node("Mul", ["t4", "p3"], ["fork2"]),
+                helper.make_node("Add", ["fork2", "p4"], ["t5"]),
+                helper.make_node("Add", ["fork2", "p5"], ["t6"]),
+                helper.make_node("Add", ["t5", "t6"], ["top_out"]),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    np.random.seed(0)
+    for i in range(num_of_params):
+        model.set_initializer(
+            "p" + str(i), np.random.rand(*input_shape).astype(np.float32)
+        )
+
+    # the two forked Mul nodes need equal parameters, since a*x + a*y == a*(x + y)
+    # is what allows the Mul to be moved past the eltwise Add:
+    model.set_initializer("p2", model.get_initializer("p1"))
+
+    # Transform
+    new_model = model.transform(MoveLinearPastEltwiseAdd())
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+
+    # Test
+    assert oxe.compare_execution(model, new_model, inp_dict)
+    assert new_model.graph.node[0].op_type == "Add"
+    assert new_model.graph.node[1].op_type == "Add"
+    assert new_model.graph.node[2].op_type == "Mul"
+    assert new_model.graph.node[3].op_type == "Mul"
+    assert new_model.graph.node[4].op_type == "Add"
+    assert new_model.graph.node[5].op_type == "Add"
+    assert len(new_model.graph.node) == 6
diff --git a/tests/transformation/test_merge_onnx_models.py b/tests/transformation/test_merge_onnx_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..db7c990baddfb50a39603937a9c5b73f512a0e59
--- /dev/null
+++ b/tests/transformation/test_merge_onnx_models.py
@@ -0,0 +1,126 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from pkgutil import get_data
+
+import numpy as np
+import onnx
+import onnx.numpy_helper as np_helper
+from onnx import TensorProto, helper
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.core.datatype import DataType
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames
+from finn.transformation.merge_onnx_models import MergeONNXModels
+import finn.core.onnx_exec as oxe
+
+
+def test_merge_onnx_models():
+    # load pre model
+    raw_m = get_data("finn", "data/onnx/mnist-conv/model.onnx")
+    model1 = ModelWrapper(raw_m)
+    # the input for model1 comes from a uint8 vector so we set the finn datatype
+    # of the input tensor to DataType.UINT8 to verify that the datatypes are correctly
+    # preserved in the transformed model
+    model1.set_tensor_datatype(model1.graph.input[0].name, DataType.UINT8)
+    model1 = model1.transform(InferShapes())
+    model1 = model1.transform(GiveUniqueNodeNames())
+    model1 = model1.transform(GiveReadableTensorNames())
+
+    # set up post model
+    shape = [1, 10]
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, shape)
+    a0 = helper.make_tensor_value_info("a0", TensorProto.FLOAT, [])
+    a1 = helper.make_tensor_value_info("a1", TensorProto.FLOAT, [])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, shape)
+
+    mul_node = helper.make_node("Mul", ["inp", "a0"], ["mul_out"])
+    div_node = helper.make_node("Div", ["mul_out", "a1"], ["outp"])
+
+    graph = helper.make_graph(
+        nodes=[mul_node, div_node],
+        name="model2-graph",
+        inputs=[inp],
+        outputs=[outp],
+        value_info=[a0, a1],
+    )
+
+    model2 = helper.make_model(graph, producer_name="model2")
+    model2 = ModelWrapper(model2)
+    # initialize model2
+    a0_value = np.random.uniform(low=0, high=1, size=(1)).astype(np.float32)
+    model2.set_initializer("a0", a0_value)
+    a1_value = np.random.uniform(low=0.1, high=1, size=(1)).astype(np.float32)
+    model2.set_initializer("a1", a1_value)
+    # set a dummy sparsity annotation to check if it gets correctly transferred
+    # to the merged model
+    sparsity = {"dw": {"kernel_shape": 0}}
+    model2.set_tensor_sparsity("a1", sparsity)
+    model2 = model2.transform(InferShapes())
+    model2 = model2.transform(InferDataTypes())
+    model2 = model2.transform(InferDataLayouts())
+    model2 = model2.transform(GiveUniqueNodeNames())
+    model2 = model2.transform(GiveReadableTensorNames())
+
+    # simulate the models before the merging and pass the output of model1 to model2
+    # load one of the test vectors
+    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
+    inp_values = onnx.load_tensor_from_string(raw_i)
+    inp_values = np_helper.to_array(inp_values)
+    idict = {model1.graph.input[0].name: inp_values}
+    odict = oxe.execute_onnx(model1, idict)
+    temp = odict[model1.graph.output[0].name]
+
+    idict = {model2.graph.input[0].name: temp}
+    odict = oxe.execute_onnx(model2, idict)
+    outp = odict[model2.graph.output[0].name]
+    # merge models
+    model_transformed = model2.transform(MergeONNXModels(model1))
+
+    idict = {model_transformed.graph.input[0].name: inp_values}
+    odict = oxe.execute_onnx(model_transformed, idict)
+    outp_transformed = odict[model_transformed.graph.output[0].name]
+
+    assert (outp == outp_transformed).all()
+    assert len(model_transformed.graph.node) == len(model1.graph.node) + len(
+        model2.graph.node
+    )
+    # the sparsity annotation of input[1] of the Div node was set to a dummy value
+    # above; to check that it is preserved, we look for the Div node in the merged
+    # model and verify that the annotation is unchanged
+    for n in model_transformed.graph.node:
+        if n.op_type == "Div":
+            tensor_name = n.input[1]
+            set_sparsity = model_transformed.get_tensor_sparsity(tensor_name)
+            assert sparsity == set_sparsity
+
+    # check if finn datatype of graph.input[0] is still set to UINT8
+    assert model_transformed.get_tensor_datatype("global_in") == DataType.UINT8
diff --git a/tests/transformation/test_move_add_past_mul.py b/tests/transformation/test_move_add_past_mul.py
index a0516d6fb2ff985fc112185ce99ad8facd841caf..163b9d310a5f12bd0b854f9aa46f53a549bf109e 100644
--- a/tests/transformation/test_move_add_past_mul.py
+++ b/tests/transformation/test_move_add_past_mul.py
@@ -60,6 +60,9 @@ def test_move_add_past_mul_single():
     new_model = model.transform(MoveAddPastMul())
     inp_dict = {"top_in": np.asarray([-1.0, 1.0], dtype=np.float32)}
     assert ox.compare_execution(model, new_model, inp_dict)
+    assert new_model.graph.node[0].op_type == "Mul"
+    assert new_model.graph.node[1].op_type == "Add"
+    assert new_model.graph.node[0].output[0] == new_model.graph.node[1].input[0]
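+    # note: the swap relies on the identity (x + a) * m == x * m + (a * m), so the
+    # Mul is applied first and the constant a * m becomes the new Add parameter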
 
 
 def test_move_add_past_mul_multi():
@@ -92,3 +95,50 @@ def test_move_add_past_mul_multi():
     new_model = model.transform(MoveAddPastMul())
     inp_dict = {"top_in": np.asarray([-1.0, 1.0], dtype=np.float32)}
     assert ox.compare_execution(model, new_model, inp_dict)
+    assert new_model.graph.node[0].op_type == "Mul"
+    assert new_model.graph.node[1].op_type == "Mul"
+    assert new_model.graph.node[2].op_type == "Add"
+    assert new_model.graph.node[3].op_type == "Add"
+    for i in range(len(new_model.graph.node) - 1):
+        assert new_model.graph.node[i].output[0] == new_model.graph.node[i + 1].input[0]
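+    # note: repeated application of the same identity floats both Mul nodes ahead
+    # of both Add nodes, giving the Mul, Mul, Add, Add chain checked above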
+
+
+def test_move_add_past_mul_only_if_linear():
+    top_in = oh.make_tensor_value_info("top_in", TensorProto.FLOAT, [2])
+    top_out = oh.make_tensor_value_info("top_out", TensorProto.FLOAT, [2])
+
+    value_info = [oh.make_tensor_value_info("add1_param", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("mul1_param", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("mul2_param", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("mul3_param", TensorProto.FLOAT, [1])]
+    modelproto = oh.make_model(
+        oh.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                oh.make_node("Add", ["top_in", "add1_param"], ["t1"]),
+                oh.make_node("Mul", ["t1", "mul1_param"], ["fork"]),
+                oh.make_node("Mul", ["fork", "mul2_param"], ["t3"]),
+                oh.make_node("Add", ["t3", "fork"], ["t4"]),
+                oh.make_node("Mul", ["t4", "mul3_param"], ["top_out"]),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    np.random.seed(0)
+    model.set_initializer("add1_param", np.random.rand(2).astype(np.float32))
+    model.set_initializer("mul1_param", np.random.rand(2).astype(np.float32))
+    model.set_initializer("mul2_param", np.random.rand(2).astype(np.float32))
+    model.set_initializer("mul3_param", np.random.rand(2).astype(np.float32))
+    new_model = model.transform(MoveAddPastMul())
+    inp_dict = {"top_in": np.random.rand(2).astype(np.float32)}
+    assert ox.compare_execution(model, new_model, inp_dict)
+    assert new_model.graph.node[0].op_type == "Mul"
+    assert new_model.graph.node[1].op_type == "Add"
+    assert new_model.graph.node[2].op_type == "Mul"
+    assert new_model.graph.node[3].op_type == "Add"
+    assert new_model.graph.node[4].op_type == "Mul"
diff --git a/tests/transformation/test_move_chw_add_past_conv.py b/tests/transformation/test_move_chw_add_past_conv.py
new file mode 100644
index 0000000000000000000000000000000000000000..b626f7e5b8564739ec383aaddfc262d642bf47cc
--- /dev/null
+++ b/tests/transformation/test_move_chw_add_past_conv.py
@@ -0,0 +1,109 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+
+import numpy as np
+from onnx import helper, TensorProto
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.streamline.reorder import MoveAddPastConv
+from finn.custom_op.im2col import compute_conv_output_dim
+import finn.core.onnx_exec as oxe
+
+
+# input dimension
+@pytest.mark.parametrize("idim", [4, 7])
+# kernel size
+@pytest.mark.parametrize("k", [2, 3])
+# stride
+@pytest.mark.parametrize("s", [1, 2])
+# input channels
+@pytest.mark.parametrize("ich", [2, 4])
+# output channels
+@pytest.mark.parametrize("och", [2, 3])
+def test_move_chw_add_past_conv(idim, k, s, ich, och):
+    odim = compute_conv_output_dim(idim, k, s)
+
+    ishape = [1, ich, idim, idim]
+    oshape = [1, och, odim, odim]
+    add_param_shape = [1, ich, 1, 1]
+    conv_param_shape = [och, ich, k, k]
+
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, ishape)
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, oshape)
+    a0 = helper.make_tensor_value_info("a0", TensorProto.FLOAT, add_param_shape)
+    a1 = helper.make_tensor_value_info("a1", TensorProto.FLOAT, conv_param_shape)
+
+    conv_config = {}
+    conv_config["dilations"] = [1, 1]
+    conv_config["group"] = 1
+    conv_config["kernel_shape"] = [k, k]
+    conv_config["pads"] = [0, 0, 0, 0]
+    conv_config["strides"] = [s, s]
+
+    add_node = helper.make_node("Add", ["inp", "a0"], ["add_out"])
+    conv_node = helper.make_node("Conv", ["add_out", "a1"], ["outp"], **conv_config)
+
+    model = helper.make_model(
+        helper.make_graph(
+            nodes=[add_node, conv_node],
+            name="move-add-graph",
+            inputs=[inp],
+            outputs=[outp],
+            value_info=[a0, a1],
+        )
+    )
+
+    model = ModelWrapper(model)
+    # initialize model
+    a0_values = np.random.uniform(low=0, high=1, size=tuple(add_param_shape)).astype(
+        np.float32
+    )
+    model.set_initializer("a0", a0_values)
+    a1_values = np.random.uniform(low=0, high=1, size=tuple(conv_param_shape)).astype(
+        np.float32
+    )
+    model.set_initializer("a1", a1_values)
+
+    model = model.transform(InferShapes())
+
+    # execution before transformation
+    inp_values = np.random.uniform(low=0, high=1, size=tuple(ishape)).astype(np.float32)
+    idict = {model.graph.input[0].name: inp_values}
+    odict = oxe.execute_onnx(model, idict)
+    y_before = odict[model.graph.output[0].name]
+
+    model = model.transform(MoveAddPastConv())
+    odict = oxe.execute_onnx(model, idict)
+    y_after = odict[model.graph.output[0].name]
+
+    assert np.isclose(y_before, y_after).all()
+    assert model.graph.node[0].op_type == "Conv"
+    assert model.graph.node[1].op_type == "Add"
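+    # note: since the Add parameter is a per-channel constant and the Conv uses no
+    # padding, Conv(x + a, W) == Conv(x, W) + Conv(a, W); the second term is again
+    # a per-output-channel constant, so the Add can be applied after the Conv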
diff --git a/tests/transformation/test_move_flatten_past_affine.py b/tests/transformation/test_move_flatten_past_affine.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2d5e51613d41f3f2db3dabcef7b982ec2816b19
--- /dev/null
+++ b/tests/transformation/test_move_flatten_past_affine.py
@@ -0,0 +1,106 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import pytest
+
+import numpy as np
+from onnx import TensorProto, helper
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.core.datatype import DataType
+import finn.core.data_layout as DataLayout
+from finn.util.basic import gen_finn_dt_tensor
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.general import GiveUniqueNodeNames, GiveReadableTensorNames
+from finn.transformation.streamline.reorder import MoveFlattenPastAffine
+import finn.core.onnx_exec as oxe
+
+# data layout
+@pytest.mark.parametrize("data_layout", [DataLayout.NHWC, DataLayout.NCHW])
+# batch size
+@pytest.mark.parametrize("batch_size", [1, 2])
+def test_move_flatten_past_affine(data_layout, batch_size):
+    if data_layout == DataLayout.NHWC:
+        ishape = [batch_size, 1, 1, 1024]
+        oshape = [batch_size, 1000]
+    else:
+        ishape = [batch_size, 1024, 1, 1]
+        oshape = [batch_size, 1000]
+
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, ishape)
+    a0 = helper.make_tensor_value_info("a1", TensorProto.FLOAT, [1024, 1000])
+    a1 = helper.make_tensor_value_info("a2", TensorProto.FLOAT, [])
+    a2 = helper.make_tensor_value_info("a3", TensorProto.FLOAT, [1000])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, oshape)
+
+    flatten_node = helper.make_node("Flatten", ["inp"], ["flatten_out"])
+    matmul_node = helper.make_node("MatMul", ["flatten_out", "a0"], ["matmul_out"])
+    mul_node = helper.make_node("Mul", ["matmul_out", "a1"], ["mul_out"])
+    add_node = helper.make_node("Add", ["mul_out", "a2"], ["outp"])
+
+    graph = helper.make_graph(
+        nodes=[flatten_node, matmul_node, mul_node, add_node],
+        name="move-reshape-graph",
+        inputs=[inp],
+        outputs=[outp],
+        value_info=[a0, a1, a2],
+    )
+
+    model = helper.make_model(graph, producer_name="move_reshape_model")
+    model = ModelWrapper(model)
+
+    # initialize values
+    a0_values = gen_finn_dt_tensor(DataType.TERNARY, [1024, 1000])
+    model.set_initializer("a0", a0_values)
+    a1_values = np.random.uniform(low=0.1, high=0.99, size=(1)).astype(np.float32)
+    model.set_initializer("a1", a1_values)
+    a2_values = np.random.uniform(low=-1, high=1, size=(1000)).astype(np.float32)
+    model.set_initializer("a2", a2_values)
+
+    model.set_tensor_datatype("inp", DataType.INT2)
+    model.set_tensor_layout("inp", data_layout)
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    model = model.transform(InferDataLayouts())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+
+    # compare execution before and after transformation
+    inp_values = gen_finn_dt_tensor(DataType.INT2, ishape)
+    idict = {model.graph.input[0].name: inp_values}
+    model_transformed = model.transform(MoveFlattenPastAffine())
+    assert oxe.compare_execution(model, model_transformed, idict)
+
+    # depending on data layout check if graph is transformed or not
+    if data_layout == DataLayout.NHWC:
+        # check if nodes have new order in transformed graph
+        assert model.graph != model_transformed.graph
+        assert model_transformed.graph.node[-1].op_type == "Flatten"
+    else:
+        assert model.graph == model_transformed.graph
diff --git a/tests/transformation/test_move_flatten_past_topk.py b/tests/transformation/test_move_flatten_past_topk.py
new file mode 100644
index 0000000000000000000000000000000000000000..65da92c22dbe9f6b1c5a49172ffae59fa6e98607
--- /dev/null
+++ b/tests/transformation/test_move_flatten_past_topk.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import pytest
+
+from onnx import TensorProto, helper
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.core.datatype import DataType
+import finn.core.data_layout as DataLayout
+from finn.util.basic import gen_finn_dt_tensor
+from finn.transformation.insert_topk import InsertTopK
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.general import GiveUniqueNodeNames, GiveReadableTensorNames
+from finn.transformation.streamline.reorder import MoveFlattenPastTopK
+import finn.core.onnx_exec as oxe
+
+# data layout
+@pytest.mark.parametrize("data_layout", [DataLayout.NHWC, DataLayout.NCHW])
+# batch size
+@pytest.mark.parametrize("batch_size", [1, 2])
+def test_move_flatten_past_topk(data_layout, batch_size):
+    if data_layout == DataLayout.NHWC:
+        ishape = [batch_size, 1, 1, 1024]
+        oshape = [batch_size, 1024]
+    else:
+        ishape = [batch_size, 1024, 1, 1]
+        oshape = [batch_size, 1024]
+
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, ishape)
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, oshape)
+
+    flatten_node = helper.make_node("Flatten", ["inp"], ["outp"])
+
+    graph = helper.make_graph(
+        nodes=[flatten_node], name="move-flatten-graph", inputs=[inp], outputs=[outp],
+    )
+
+    model = helper.make_model(graph, producer_name="move_flatten_model")
+    model = ModelWrapper(model)
+
+    model.set_tensor_datatype("inp", DataType.INT2)
+    model.set_tensor_layout("inp", data_layout)
+    model = model.transform(InsertTopK())
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    model = model.transform(InferDataLayouts())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+
+    # compare execution before and after transformation
+    inp_values = gen_finn_dt_tensor(DataType.INT2, ishape)
+    idict = {model.graph.input[0].name: inp_values}
+    model_transformed = model.transform(MoveFlattenPastTopK())
+    assert oxe.compare_execution(model, model_transformed, idict)
+
+    # depending on data layout check if graph is transformed or not
+    if data_layout == DataLayout.NHWC:
+        # check if nodes have new order in transformed graph
+        assert model.graph != model_transformed.graph
+        assert model_transformed.graph.node[-1].op_type == "Flatten"
+    else:
+        assert model.graph == model_transformed.graph
diff --git a/tests/transformation/test_move_maxpool_past_multithreshold.py b/tests/transformation/test_move_maxpool_past_multithreshold.py
new file mode 100644
index 0000000000000000000000000000000000000000..2fc19debf8d6fc89d15e3d731f1e54daa491c321
--- /dev/null
+++ b/tests/transformation/test_move_maxpool_past_multithreshold.py
@@ -0,0 +1,100 @@
+from onnx import TensorProto, helper
+import numpy as np
+
+import finn.core.onnx_exec as oxe
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.streamline.reorder import MoveMaxPoolPastMultiThreshold
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+
+
+def get_multithreshold_rand_params(channels, num_of_thres, seed=None):
+    if seed is not None:
+        np.random.seed(seed)
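+    # per-channel random step size and bias used to spread the threshold levels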
+    steps = np.random.rand(channels, 1) * 2
+    bias = np.random.rand(channels, 1) * 10
+    thres = [np.arange(num_of_thres) for chn in range(channels)]
+    thres = ((thres - bias) * steps).astype(np.float32)
+    return thres
+
+
+def test_move_maxpool_past_multithreshold():
+    # generate test vectors of correct shape
+    ch = 64
+    ifmdim = 16
+    ofmdim = 16 // 4
+    input_shape = (1, ch, ifmdim, ifmdim)
+    output_shape = (1, ch, ofmdim, ofmdim)
+
+    top_in = helper.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = helper.make_tensor_value_info("top_out", TensorProto.FLOAT, output_shape)
+
+    maxpool_config = {}
+    maxpool_config["pads"] = [1, 1, 1, 1]
+    maxpool_config["kernel_shape"] = [3, 3]
+    maxpool_config["strides"] = [2, 2]
+
+    value_info = []
+    thres1_shape = [1, 1]
+    value_info += [
+        helper.make_tensor_value_info("thres1", TensorProto.FLOAT, thres1_shape)
+    ]
+
+    thres2_shape = [ch, 14]
+    value_info += [
+        helper.make_tensor_value_info("thres2", TensorProto.FLOAT, thres2_shape)
+    ]
+
+    nodes = []
+    nodes += [helper.make_node("MaxPool", ["top_in"], ["t1"], **maxpool_config)]
+    nodes += [
+        helper.make_node(
+            "MultiThreshold",
+            ["t1", "thres1"],
+            ["t2"],
+            domain="finn",
+            out_dtype="BIPOLAR",
+            out_bias=-1.0,
+            out_scale=1.0,
+        )
+    ]
+    nodes += [helper.make_node("MaxPool", ["t2"], ["t3"], **maxpool_config)]
+    nodes += [
+        helper.make_node(
+            "MultiThreshold",
+            ["t3", "thres2"],
+            ["top_out"],
+            domain="finn",
+            out_dtype="UINT4",
+        )
+    ]
+
+    modelproto = helper.make_model(
+        helper.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=nodes,
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+
+    model.set_initializer("thres1", np.array([[0]]))
+    model.set_initializer(
+        "thres2", get_multithreshold_rand_params(*thres2_shape, seed=0)
+    )
+
+    # Transform
+    new_model = model.transform(MoveMaxPoolPastMultiThreshold())
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+
+    # Test
+    assert oxe.compare_execution(model, new_model, inp_dict)
+    assert new_model.graph.node[0].op_type == "MaxPool"
+    assert new_model.graph.node[1].op_type == "MultiThreshold"
+    assert new_model.graph.node[2].op_type == "MultiThreshold"
+    assert new_model.graph.node[3].op_type == "MaxPool"
+    assert len(new_model.graph.node) == 4
diff --git a/tests/transformation/test_move_mul_past_dw_conv.py b/tests/transformation/test_move_mul_past_dw_conv.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ae8fbfe89986d58d3d71f5f8735a98469d9d1e3
--- /dev/null
+++ b/tests/transformation/test_move_mul_past_dw_conv.py
@@ -0,0 +1,93 @@
+import pytest
+
+from onnx import helper, TensorProto
+from finn.custom_op.im2col import compute_conv_output_dim
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_shapes import InferShapes
+from finn.util.basic import gen_finn_dt_tensor
+from finn.transformation.streamline.reorder import MoveMulPastDWConv
+
+
+# input dimension
+@pytest.mark.parametrize("ifm_dim", [4, 7])
+# input channels
+@pytest.mark.parametrize("ifm_ch", [2, 3])
+# kernel size
+@pytest.mark.parametrize("k", [2, 3])
+# stride
+@pytest.mark.parametrize("stride", [1, 2])
+# padding
+@pytest.mark.parametrize("pad_amt", [0, 1])
+# depthwise
+@pytest.mark.parametrize("dw", [0, 1])
+def test_move_mul_past_dw_conv(ifm_dim, ifm_ch, k, stride, pad_amt, dw):
+    if dw == 1:
+        ofm_ch = ifm_ch
+        groups = ifm_ch
+        W_shape = [ofm_ch, 1, k, k]
+    else:
+        ofm_ch = ifm_ch + 2
+        groups = 1
+        W_shape = [ofm_ch, ifm_ch, k, k]
+
+    ofm_dim = compute_conv_output_dim(ifm_dim, k, stride, pad_amt)
+
+    # set up onnx model
+    inp = helper.make_tensor_value_info(
+        "inp", TensorProto.FLOAT, [1, ifm_ch, ifm_dim, ifm_dim]
+    )
+    mul = helper.make_tensor_value_info("mul", TensorProto.FLOAT, [1, ifm_ch, 1, 1])
+    W = helper.make_tensor_value_info("W", TensorProto.FLOAT, W_shape)
+    outp = helper.make_tensor_value_info(
+        "outp", TensorProto.FLOAT, [1, ofm_ch, ofm_dim, ofm_dim]
+    )
+
+    Mul_node = helper.make_node("Mul", ["inp", "mul"], ["mul_out"])
+
+    Conv_node = helper.make_node(
+        "Conv",
+        ["mul_out", "W"],
+        ["outp"],
+        group=groups,
+        kernel_shape=[k, k],
+        pads=[pad_amt, pad_amt, pad_amt, pad_amt],
+        strides=[stride, stride],
+    )
+
+    graph = helper.make_graph(
+        nodes=[Mul_node, Conv_node],
+        name="mulpastconv_graph",
+        inputs=[inp],
+        outputs=[outp],
+        value_info=[mul, W],
+    )
+
+    model = helper.make_model(graph, producer_name="mulpastconv-model")
+    model = ModelWrapper(model)
+    inp_values = gen_finn_dt_tensor(DataType.INT2, [1, ifm_ch, ifm_dim, ifm_dim])
+    mul_values = gen_finn_dt_tensor(DataType.INT2, [1, ifm_ch, 1, 1])
+    W_values = gen_finn_dt_tensor(DataType.INT2, W_shape)
+    model.set_initializer("W", W_values)
+    model.set_initializer("mul", mul_values)
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    idict = {"inp": inp_values}
+    odict = oxe.execute_onnx(model, idict, True)
+    out_before = odict["outp"]
+
+    # move channelwise multiplication past depthwise conv
+    model_transformed = model.transform(MoveMulPastDWConv())
+    odict = oxe.execute_onnx(model_transformed, idict, True)
+    out_after = odict["outp"]
+
+    assert (out_before == out_after).all()
+
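+    # regular conv: node order is unchanged; depthwise conv: Mul and Conv are swapped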
+    if dw == 0:
+        assert model.graph.node[0].op_type == model_transformed.graph.node[0].op_type
+        assert model.graph.node[1].op_type == model_transformed.graph.node[1].op_type
+    else:
+        assert model.graph.node[0].op_type == model_transformed.graph.node[1].op_type
+        assert model.graph.node[1].op_type == model_transformed.graph.node[0].op_type
diff --git a/tests/transformation/test_move_past_fork.py b/tests/transformation/test_move_past_fork.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3d37bd60c9e2580ca4499daafa8693f39fec810
--- /dev/null
+++ b/tests/transformation/test_move_past_fork.py
@@ -0,0 +1,79 @@
+from onnx import TensorProto, helper
+import numpy as np
+
+import finn.core.onnx_exec as oxe
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.streamline.reorder import MoveLinearPastFork
+from finn.transformation.infer_shapes import InferShapes
+
+import pytest
+
+
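+# channels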
+@pytest.mark.parametrize("ch", [64, 1])
+# ifmdim
+@pytest.mark.parametrize("ifmdim", [-1, 7])
+def test_move_past_fork(ch, ifmdim):
+    # generate test vectors of correct shape
+    if ifmdim == -1:
+        input_shape = (1, ch)
+    else:
+        input_shape = (1, ch, ifmdim, ifmdim)
+
+    top_in = helper.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = helper.make_tensor_value_info("top_out", TensorProto.FLOAT, input_shape)
+
+    num_of_params = 8
+    value_info = []
+    for i in range(num_of_params):
+        value_info += [
+            helper.make_tensor_value_info("p" + str(i), TensorProto.FLOAT, input_shape)
+        ]
+
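+    # nodes kept in variables so the fork assertions at the end can reference them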
+    add_1_to_move = helper.make_node("Add", ["top_in", "p0"], ["fork1"])
+    mul_1_to_move = helper.make_node("Mul", ["t5", "p4"], ["fork2"])
+    add_2_to_move = helper.make_node("Add", ["fork2", "p5"], ["t6"])
+    mul_1_not_to_move = helper.make_node("Mul", ["t8", "p7"], ["fork3"])
+    modelproto = helper.make_model(
+        helper.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                # fork1
+                add_1_to_move,
+                helper.make_node("Mul", ["fork1", "p1"], ["t2"]),
+                helper.make_node("Mul", ["fork1", "p2"], ["t3"]),
+                helper.make_node("Add", ["t2", "t3"], ["t4"]),
+                helper.make_node("Add", ["t4", "p3"], ["t5"]),
+                # fork2
+                mul_1_to_move,
+                add_2_to_move,
+                helper.make_node("Add", ["fork2", "p6"], ["t7"]),
+                helper.make_node("Add", ["t6", "t7"], ["t8"]),
+                # fork3: branches are empty, so the transform should leave it alone
+                mul_1_not_to_move,
+                helper.make_node("Add", ["fork3", "fork3"], ["top_out"]),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    np.random.seed(0)
+    for i in range(num_of_params):
+        model.set_initializer(
+            "p" + str(i), np.random.rand(*input_shape).astype(np.float32)
+        )
+
+    # Transform
+    new_model = model.transform(MoveLinearPastFork())
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+
+    # Test
+    assert oxe.compare_execution(model, new_model, inp_dict)
+    assert not new_model.is_fork_node(add_1_to_move)
+    assert not new_model.is_fork_node(mul_1_to_move)
+    assert not new_model.is_fork_node(add_2_to_move)
+    assert new_model.is_fork_node(mul_1_not_to_move)
+    assert len(new_model.graph.node) == 14
diff --git a/tests/transformation/test_move_scalar_past_conv.py b/tests/transformation/test_move_scalar_past_conv.py
new file mode 100644
index 0000000000000000000000000000000000000000..94fee7907d1ed1cccbf95520e903c7d9b43d8f7d
--- /dev/null
+++ b/tests/transformation/test_move_scalar_past_conv.py
@@ -0,0 +1,166 @@
+import numpy as np
+import onnx.helper as oh
+import pytest
+from onnx import TensorProto
+
+import finn.core.onnx_exec as ox
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.streamline import (
+    MoveAddPastConv,
+    MoveScalarMulPastConv,
+)
+
+
+@pytest.mark.parametrize("padding", [False, True])
+@pytest.mark.parametrize(
+    "test_args", [("Add", MoveAddPastConv()), ("Mul", MoveScalarMulPastConv())],
+)
+def test_move_scalar_past_conv(test_args, padding):
+    scalar_op = test_args[0]
+    transf_fxn = test_args[1]
+
+    in_feature_dim = 7
+    in_chn = 3
+
+    stages = 2
+    kernel_size = 3
+
+    out_feature_dim = (
+        in_feature_dim if padding else in_feature_dim - (kernel_size // 2 * 2) * stages
+    )
+
+    input_shape = [1, in_chn, in_feature_dim, in_feature_dim]
+    output_shape = [1, in_chn, out_feature_dim, out_feature_dim]
+
+    conv_param_shape = [in_chn, in_chn, kernel_size, kernel_size]
+
+    conv_config = {}
+    conv_config["dilations"] = [1, 1]
+    conv_config["group"] = 1
+    conv_config["kernel_shape"] = [kernel_size, kernel_size]
+    if padding:
+        conv_config["pads"] = [1, 1, 1, 1]
+    else:
+        conv_config["pads"] = [0, 0, 0, 0]
+    conv_config["strides"] = [1, 1]
+
+    top_in = oh.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = oh.make_tensor_value_info("top_out", TensorProto.FLOAT, output_shape)
+
+    value_info = [oh.make_tensor_value_info("p1", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("p2", TensorProto.FLOAT, conv_param_shape)]
+    value_info += [oh.make_tensor_value_info("p3", TensorProto.FLOAT, conv_param_shape)]
+
+    modelproto = oh.make_model(
+        oh.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                oh.make_node(scalar_op, ["top_in", "p1"], ["t1"]),
+                oh.make_node("Conv", ["t1", "p2"], ["t2"], **conv_config),
+                oh.make_node("Conv", ["t2", "p3"], ["top_out"], **conv_config),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    np.random.seed(0)
+    model.set_initializer("p1", *np.random.rand(1).astype(np.float32))
+    model.set_initializer("p2", np.random.rand(*conv_param_shape).astype(np.float32))
+    model.set_initializer("p3", np.random.rand(*conv_param_shape).astype(np.float32))
+    new_model = model.transform(transf_fxn)
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+
+    assert ox.compare_execution(model, new_model, inp_dict)
+    if scalar_op == "Add":
+        if padding:
+            assert new_model.graph.node[0].op_type == scalar_op
+            assert new_model.graph.node[1].op_type == "Conv"
+            assert new_model.graph.node[2].op_type == "Conv"
+        else:
+            assert new_model.graph.node[0].op_type == "Conv"
+            assert new_model.graph.node[1].op_type == "Conv"
+            assert new_model.graph.node[2].op_type == scalar_op
+    else:
+        assert new_model.graph.node[0].op_type == "Conv"
+        assert new_model.graph.node[1].op_type == "Conv"
+        assert new_model.graph.node[2].op_type == scalar_op
+
+
+@pytest.mark.parametrize(
+    "test_args", [("Add", MoveAddPastConv()), ("Mul", MoveScalarMulPastConv())],
+)
+def test_move_scalar_past_conv_only_if_linear(test_args):
+    scalar_op = test_args[0]
+    transf_fxn = test_args[1]
+
+    in_feature_dim = 7
+    in_chn = 1
+    padding = False
+    stages = 3
+    kernel_size = 3
+
+    out_feature_dim = (
+        in_feature_dim if padding else in_feature_dim - (kernel_size // 2 * 2) * stages
+    )
+
+    input_shape = [1, in_chn, in_feature_dim, in_feature_dim]
+    output_shape = [1, in_chn, out_feature_dim, out_feature_dim]
+
+    conv_param_shape = [in_chn, in_chn, kernel_size, kernel_size]
+
+    conv_config = {}
+    conv_config["dilations"] = [1, 1]
+    conv_config["group"] = 1
+    conv_config["kernel_shape"] = [kernel_size, kernel_size]
+    conv_config["pads"] = [0, 0, 0, 0]
+    conv_config["strides"] = [1, 1]
+
+    top_in = oh.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = oh.make_tensor_value_info("top_out", TensorProto.FLOAT, output_shape)
+
+    value_info = [oh.make_tensor_value_info("p1", TensorProto.FLOAT, [1])]
+    value_info += [oh.make_tensor_value_info("p2", TensorProto.FLOAT, conv_param_shape)]
+    value_info += [oh.make_tensor_value_info("p3", TensorProto.FLOAT, conv_param_shape)]
+    value_info += [oh.make_tensor_value_info("p4", TensorProto.FLOAT, conv_param_shape)]
+    value_info += [oh.make_tensor_value_info("p5", TensorProto.FLOAT, conv_param_shape)]
+
+    modelproto = oh.make_model(
+        oh.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                oh.make_node("Conv", ["top_in", "p2"], ["t1"], **conv_config),
+                oh.make_node(scalar_op, ["t1", "p1"], ["t2"]),
+                oh.make_node("Conv", ["t2", "p3"], ["t3"], **conv_config),
+                oh.make_node("Conv", ["t2", "p4"], ["t4"], **conv_config),
+                oh.make_node(scalar_op, ["t3", "t4"], ["t5"]),
+                oh.make_node("Conv", ["t5", "p5"], ["top_out"], **conv_config),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    np.random.seed(0)
+    model.set_initializer("p1", *np.random.rand(1).astype(np.float32))
+    model.set_initializer("p2", np.random.rand(*conv_param_shape).astype(np.float32))
+    model.set_initializer("p3", np.random.rand(*conv_param_shape).astype(np.float32))
+    model.set_initializer("p4", np.random.rand(*conv_param_shape).astype(np.float32))
+    model.set_initializer("p5", np.random.rand(*conv_param_shape).astype(np.float32))
+    new_model = model.transform(transf_fxn)
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+
+    assert ox.compare_execution(model, new_model, inp_dict)
+    assert new_model.graph.node[0].op_type == "Conv"
+    assert new_model.graph.node[1].op_type == scalar_op
+    assert new_model.graph.node[2].op_type == "Conv"
+    assert new_model.graph.node[3].op_type == "Conv"
+    assert new_model.graph.node[4].op_type == scalar_op
+    assert new_model.graph.node[5].op_type == "Conv"
diff --git a/tests/transformation/test_move_scalar_past_matmul.py b/tests/transformation/test_move_scalar_past_matmul.py
index 896527e82d8cfa869cb979d1102904c70703a14c..e432dbf4ec1a38551609e5914e2d44968a020908 100644
--- a/tests/transformation/test_move_scalar_past_matmul.py
+++ b/tests/transformation/test_move_scalar_past_matmul.py
@@ -27,6 +27,7 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 import numpy as np
+import pytest
 import onnx.helper as oh
 from onnx import TensorProto
 
@@ -99,3 +100,56 @@ def test_move_scalar_add_past_matmul():
     assert new_model.graph.node[0].op_type == "MatMul"
     assert new_model.graph.node[1].op_type == "Add"
     assert new_model.graph.node[0].output[0] == new_model.graph.node[1].input[0]
+
+
+@pytest.mark.parametrize(
+    "test_args",
+    [("Add", MoveScalarAddPastMatMul()), ("Mul", MoveScalarMulPastMatMul())],
+)
+def test_move_scalar_past_matmul_only_if_linear(test_args):
+    scalar_op = test_args[0]
+    transf_fxn = test_args[1]
+    input_shape = [1, 2]
+    matmul_shape = [2, 2]
+    top_in = oh.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = oh.make_tensor_value_info("top_out", TensorProto.FLOAT, input_shape)
+
+    p1 = oh.make_tensor_value_info("p1", TensorProto.FLOAT, [1, 1])
+    p2 = oh.make_tensor_value_info("p2", TensorProto.FLOAT, matmul_shape)
+    p3 = oh.make_tensor_value_info("p3", TensorProto.FLOAT, matmul_shape)
+    p4 = oh.make_tensor_value_info("p4", TensorProto.FLOAT, matmul_shape)
+    modelproto = oh.make_model(
+        oh.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=[p1, p2, p3, p4],
+            nodes=[
+                oh.make_node(scalar_op, ["top_in", "p1"], ["t1"]),
+                oh.make_node("MatMul", ["t1", "p2"], ["fork"]),
+                oh.make_node("MatMul", ["fork", "p3"], ["t3"]),
+                oh.make_node(scalar_op, ["t3", "fork"], ["t4"]),
+                oh.make_node("MatMul", ["t4", "p4"], ["top_out"]),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    np.random.seed(0)
+    model.set_initializer("p1", np.random.rand(1, 1).astype(np.float32))
+    model.set_initializer("p2", np.random.rand(*matmul_shape).astype(np.float32))
+    model.set_initializer("p3", np.random.rand(*matmul_shape).astype(np.float32))
+    model.set_initializer("p4", np.random.rand(*matmul_shape).astype(np.float32))
+
+    # Transform
+    new_model = model.transform(transf_fxn)
+
+    # Test
+    inp_dict = {"top_in": np.random.rand(*input_shape).astype(np.float32)}
+    assert ox.compare_execution(model, new_model, inp_dict)
+    assert new_model.graph.node[0].op_type == "MatMul"
+    assert new_model.graph.node[1].op_type == scalar_op
+    assert new_model.graph.node[2].op_type == "MatMul"
+    assert new_model.graph.node[3].op_type == scalar_op
+    assert new_model.graph.node[4].op_type == "MatMul"
diff --git a/tests/transformation/test_move_transpose_past_scalar_mul.py b/tests/transformation/test_move_transpose_past_scalar_mul.py
new file mode 100644
index 0000000000000000000000000000000000000000..e434fc7d4f683120176e18a2bfa9da99d9ee0b0e
--- /dev/null
+++ b/tests/transformation/test_move_transpose_past_scalar_mul.py
@@ -0,0 +1,82 @@
+import pytest
+
+import numpy as np
+from onnx import TensorProto, helper
+
+from finn.core.modelwrapper import ModelWrapper
+import finn.core.data_layout as DataLayout
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_data_layouts import InferDataLayouts
+from finn.transformation.general import GiveUniqueNodeNames, GiveReadableTensorNames
+from finn.transformation.streamline.reorder import MoveTransposePastScalarMul
+import finn.core.onnx_exec as oxe
+
+# permutation of transpose node
+@pytest.mark.parametrize("perm", [[0, 2, 3, 1], [0, 1, 3, 2], [3, 2, 0, 1]])
+# scalar mul
+@pytest.mark.parametrize("scalar", [True, False])
+# data layout
+@pytest.mark.parametrize("data_layout", [None, DataLayout.NHWC, DataLayout.NCHW])
+def test_move_transpose_past_scalar_mul(perm, scalar, data_layout):
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, 2, 3, 4])
+    # determine out_size by applying "perm" to a dummy input
+    dummy_in = np.random.uniform(low=0, high=1, size=(1, 2, 3, 4)).astype(np.float32)
+    out_size = dummy_in.transpose(tuple(perm)).shape
+
+    if scalar is True:
+        a0_size = []
+    else:
+        a0_size = out_size
+    a0 = helper.make_tensor_value_info("a0", TensorProto.FLOAT, a0_size)
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, out_size)
+    transp_node = helper.make_node("Transpose", ["inp"], ["transp_out"], perm=perm)
+    mul_node = helper.make_node("Mul", ["transp_out", "a0"], ["outp"])
+
+    graph = helper.make_graph(
+        nodes=[transp_node, mul_node],
+        name="mv-transpose-graph",
+        inputs=[inp],
+        outputs=[outp],
+        value_info=[a0],
+    )
+
+    model = helper.make_model(graph, producer_name="mv_transpose_model")
+    model = ModelWrapper(model)
+
+    # initialize values
+    a0_values = np.random.uniform(low=0, high=1, size=tuple(a0_size)).astype(np.float32)
+    model.set_initializer("a0", a0_values)
+    if data_layout is not None:
+        model.set_tensor_layout("inp", data_layout)
+        model = model.transform(InferDataLayouts())
+
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    model = model.transform(GiveUniqueNodeNames())
+    model = model.transform(GiveReadableTensorNames())
+
+    # compare execution before and after transformation
+    inp_values = np.random.uniform(low=0, high=1, size=(1, 2, 3, 4)).astype(np.float32)
+    idict = {model.graph.input[0].name: inp_values}
+    model_transformed = model.transform(MoveTransposePastScalarMul())
+    assert oxe.compare_execution(model, model_transformed, idict)
+
+    # check if order changed
+    if scalar is True and data_layout is not None:
+        assert model_transformed.graph.node[0] != model.graph.node[0]
+        assert model_transformed.graph.node[1] != model.graph.node[1]
+        assert model_transformed.graph.node[0].op_type == "Mul"
+        assert model_transformed.graph.node[1].op_type == "Transpose"
+        mul_input = model_transformed.graph.node[0].input[0]
+        mul_output = model_transformed.graph.node[0].output[0]
+        assert model_transformed.get_tensor_layout(mul_input) == data_layout
+        assert model_transformed.get_tensor_layout(mul_output) == data_layout
+    else:
+        assert model_transformed.graph.node[0] == model.graph.node[0]
+        assert model_transformed.graph.node[1] == model.graph.node[1]
+        if data_layout is not None:
+            mul_input = model_transformed.graph.node[1].input[0]
+            mul_output = model_transformed.graph.node[1].output[0]
+            assert model_transformed.get_tensor_layout(mul_input) != data_layout
+            assert model_transformed.get_tensor_layout(mul_output) != data_layout
diff --git a/tests/transformation/test_remove_identity_ops.py b/tests/transformation/test_remove_identity_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..536c1ab0b48fa44388da23f45b528da3c5f3b2f2
--- /dev/null
+++ b/tests/transformation/test_remove_identity_ops.py
@@ -0,0 +1,81 @@
+import pytest
+
+import numpy as np
+from onnx import helper, TensorProto
+import finn.core.onnx_exec as oxe
+from finn.core.datatype import DataType
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.infer_datatypes import InferDataTypes
+from finn.transformation.infer_shapes import InferShapes
+from finn.transformation.streamline.remove import RemoveIdentityOps
+from finn.util.basic import gen_finn_dt_tensor
+
+
+def insert_identity_op(model, op):
+    if op in ["Add", "Sub"]:
+        val = np.asarray([0.0], dtype=np.float32)
+    elif op in ["Mul", "Div"]:
+        val = np.asarray([1.0], dtype=np.float32)
+    else:
+        return
+
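+    # insert the identity op between the Div node and the final MatMul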
+    identity_node = helper.make_node(op, ["div_out", "value"], ["ident_out"])
+    graph = model.graph
+    graph.node.insert(3, identity_node)
+    graph.node[-1].input[0] = "ident_out"
+    model.set_initializer("value", val)
+
+    return model
+
+
+# identity operations to be inserted
+@pytest.mark.parametrize("op", ["Add", "Sub", "Mul", "Div"])
+def test_remove_identity_ops(op):
+
+    # set up onnx model
+    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, 4, 1, 1])
+    mul = helper.make_tensor_value_info("mul", TensorProto.FLOAT, [])
+    shape = helper.make_tensor_value_info("shape", TensorProto.FLOAT, [2])
+    div = helper.make_tensor_value_info("div", TensorProto.FLOAT, [])
+    matmul = helper.make_tensor_value_info("matmul", TensorProto.FLOAT, [4, 2])
+    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, 2])
+
+    mul_node = helper.make_node("Mul", ["inp", "mul"], ["mul_out"])
+    reshape_node = helper.make_node("Reshape", ["mul_out", "shape"], ["reshape_out"])
+    div_node = helper.make_node("Div", ["reshape_out", "div"], ["div_out"])
+    matmul_node = helper.make_node("MatMul", ["div_out", "matmul"], ["outp"])
+
+    graph = helper.make_graph(
+        nodes=[mul_node, reshape_node, div_node, matmul_node],
+        name="identity-graph",
+        inputs=[inp],
+        outputs=[outp],
+        value_info=[mul, shape, div, matmul],
+    )
+
+    model = helper.make_model(graph, producer_name="mulpastconv-model")
+    model = ModelWrapper(model)
+    inp_values = gen_finn_dt_tensor(DataType.INT2, [1, 4, 1, 1])
+    mul_values = np.random.uniform(low=0.1, high=0.99, size=(1)).astype(np.float32)
+    shape_values = np.asarray([1, -1], dtype=np.int64)
+    div_values = np.random.uniform(low=0.1, high=0.99, size=(1)).astype(np.float32)
+    matmul_values = gen_finn_dt_tensor(DataType.INT2, [4, 2])
+    model.set_initializer("mul", mul_values)
+    model.set_initializer("shape", shape_values)
+    model.set_initializer("div", div_values)
+    model.set_initializer("matmul", matmul_values)
+    insert_identity_op(model, op)
+    model = model.transform(InferShapes())
+    model = model.transform(InferDataTypes())
+    idict = {"inp": inp_values}
+    odict = oxe.execute_onnx(model, idict)
+    out_before = odict["outp"]
+    num_of_nodes_before = len(model.graph.node)
+
+    model = model.transform(RemoveIdentityOps())
+    num_of_nodes_after = len(model.graph.node)
+    assert num_of_nodes_before - 1 == num_of_nodes_after
+
+    odict = oxe.execute_onnx(model, idict)
+    out_after = odict["outp"]
+    assert (out_before == out_after).all()
diff --git a/tests/transformation/test_sign_to_thres.py b/tests/transformation/test_sign_to_thres.py
index 1033a313560c714b02e256e5940694868fa41cbf..a92f839e5f6ca8b45eadf939fa35973ac153e0b1 100644
--- a/tests/transformation/test_sign_to_thres.py
+++ b/tests/transformation/test_sign_to_thres.py
@@ -40,12 +40,7 @@ from finn.transformation.infer_shapes import InferShapes
 from finn.transformation.streamline import ConvertSignToThres
 from finn.util.test import get_test_model_trained
 
-export_onnx_path = "test_output_lfc.onnx"
-transformed_onnx_path = "test_output_lfc_transformed.onnx"
-# TODO get from config instead, hardcoded to Docker path for now
-trained_lfc_checkpoint = (
-    "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar"
-)
+export_onnx_path = "test_sign_to_thres.onnx"
 
 
 def test_sign_to_thres():
diff --git a/tests/transformation/test_sort_graph.py b/tests/transformation/test_sort_graph.py
new file mode 100644
index 0000000000000000000000000000000000000000..05842504c13b144bb34e8084fb12b5086fa84115
--- /dev/null
+++ b/tests/transformation/test_sort_graph.py
@@ -0,0 +1,150 @@
+from onnx import TensorProto, helper
+import numpy as np
+
+from finn.core.modelwrapper import ModelWrapper
+from finn.transformation.general import SortGraph
+from finn.transformation.infer_shapes import InferShapes
+import pytest
+import finn.analysis.topology as ta
+
+
+def make_randomly_sorted_linear_model(num_of_nodes, seed=None):
+    if seed is not None:
+        np.random.seed(seed)
+
+    ch = 2
+    ifmdim = 16
+    input_shape = (1, ch, ifmdim, ifmdim)
+
+    top_in = helper.make_tensor_value_info("t0", TensorProto.FLOAT, input_shape)
+    top_out = helper.make_tensor_value_info(
+        "t" + str(num_of_nodes), TensorProto.FLOAT, input_shape
+    )
+
+    value_info = []
+    nodes = []
+    for i in range(num_of_nodes):
+        nodes += [
+            helper.make_node("Add", ["t" + str(i), "p" + str(i)], ["t" + str(i + 1)])
+        ]
+        value_info += [
+            helper.make_tensor_value_info("p" + str(i), TensorProto.FLOAT, input_shape)
+        ]
+
+    nodes = np.random.permutation(nodes)
+
+    modelproto = helper.make_model(
+        helper.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=nodes,
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    for i in range(num_of_nodes):
+        model.set_initializer(
+            "p" + str(i), np.random.rand(*input_shape).astype(np.float32)
+        )
+
+    return model
+
+
+@pytest.mark.parametrize("num_of_nodes", [64])
+def test_sort_linear_graph(num_of_nodes):
+    model = make_randomly_sorted_linear_model(num_of_nodes, seed=0)
+    new_model = model.transform(SortGraph())
+
+    # Test
+    ret = new_model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"], "Nodes are not topologically sorted."
+
+
+def test_sort_nonlinear_graph():
+    ch = 2
+    ifmdim = 16
+    input_shape = (1, ch, ifmdim, ifmdim)
+
+    top_in = helper.make_tensor_value_info("top_in", TensorProto.FLOAT, input_shape)
+    top_out = helper.make_tensor_value_info("top_out", TensorProto.FLOAT, input_shape)
+
+    num_of_params = 8
+    value_info = []
+    for i in range(num_of_params):
+        value_info += [
+            helper.make_tensor_value_info("p" + str(i), TensorProto.FLOAT, input_shape)
+        ]
+
+    modelproto = helper.make_model(
+        helper.make_graph(
+            name="test",
+            inputs=[top_in],
+            outputs=[top_out],
+            value_info=value_info,
+            nodes=[
+                # Not sorted nodes
+                helper.make_node("Mul", ["fork1", "p2"], ["t3"]),
+                helper.make_node("Add", ["t4", "p3"], ["t5"]),
+                helper.make_node("Add", ["t2", "t3"], ["t4"]),
+                helper.make_node("Add", ["t6", "t7"], ["t8"]),
+                helper.make_node("Add", ["fork3", "fork3"], ["top_out"]),
+                helper.make_node("Mul", ["t5", "p4"], ["fork2"]),
+                helper.make_node("Add", ["top_in", "p0"], ["fork1"]),
+                helper.make_node("Mul", ["fork1", "p1"], ["t2"]),
+                helper.make_node("Add", ["fork2", "p5"], ["t6"]),
+                helper.make_node("Add", ["fork2", "p6"], ["t7"]),
+                helper.make_node("Mul", ["t8", "p7"], ["fork3"]),
+            ],
+        )
+    )
+    model = ModelWrapper(modelproto)
+    model = model.transform(InferShapes())
+
+    np.random.seed(0)
+    for i in range(num_of_params):
+        model.set_initializer(
+            "p" + str(i), np.random.rand(*input_shape).astype(np.float32)
+        )
+
+    new_model = model.transform(SortGraph())
+
+    # Test
+    ret = new_model.analysis(ta.nodes_topologically_sorted)
+    assert ret["nodes_topologically_sorted"], "Nodes are not topologically sorted."
+
+
+if __name__ == "__main__":
+    import time
+
+    sizes = [10, 50, 100, 500, 1000]
+    times = []
+    reps = 10
+
+    print("SortGraph performance test:")
+    print("Test sizes", sizes)
+    print("Repetitions per size:", reps)
+    for sz in sizes:
+        acc_time = 0
+        print(" Testing size ", sz)
+        for i in range(reps):
+            # sorting an already-sorted model should take roughly the same time,
+            # but building a fresh model per repetition is the more general approach
+            model = make_randomly_sorted_linear_model(sz)  # new model as seed is None
+            bef = time.time()
+            new_model = model.transform(SortGraph(), make_deepcopy=False)
+            acc_time += time.time() - bef
+
+        times += [acc_time / reps]
+
+    # print csv
+    print("\nnum_of_nodes,  seconds")
+    for sz, tm in zip(sizes, times):
+        print("{:12d}, {:6.4e}".format(sz, tm))
+
+    # plot
+    # import matplotlib.pyplot as plt
+    # plt.plot(sizes,times,"--o")
+    # plt.grid(True)
diff --git a/tests/transformation/test_topk_insert.py b/tests/transformation/test_topk_insert.py
index ac32c30edbbf466b2b441bcc92975a7d50f42bda..a9faac4df0caf973d9aae6430e007eac349a7c43 100644
--- a/tests/transformation/test_topk_insert.py
+++ b/tests/transformation/test_topk_insert.py
@@ -1,3 +1,4 @@
+import os
 import onnx
 from finn.util.test import get_test_model_trained
 import brevitas.onnx as bo
@@ -17,10 +18,10 @@ from pkgutil import get_data
 
 import pytest
 
-export_onnx_path = "test_output_lfc.onnx"
+export_onnx_path = "test_topk_insert.onnx"
 
 
-@pytest.mark.parametrize("k", [1, 5, 10])
+@pytest.mark.parametrize("k", [1, 2])
 def test_topk_insert(k):
     tfc = get_test_model_trained("TFC", 1, 1)
     bo.export_finn_onnx(tfc, (1, 1, 28, 28), export_onnx_path)
@@ -56,3 +57,4 @@ def test_topk_insert(k):
     output_pysim_topk = output_pysim_topk.astype(np.int).flatten()
 
     assert np.array_equal(output_golden_topk, output_pysim_topk)
+    os.remove(export_onnx_path)
diff --git a/tests/util/test_create.py b/tests/util/test_create.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e236978592b02e1c18b03aba56ff8b2369311a6
--- /dev/null
+++ b/tests/util/test_create.py
@@ -0,0 +1,64 @@
+# Copyright (c) 2020, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+import finn.util.create as create
+from finn.core.datatype import DataType
+
+
+@pytest.mark.parametrize("bitwidth", [DataType.BIPOLAR, DataType.INT2, DataType.INT4])
+def test_hls_random_mlp_maker(bitwidth):
+    w = bitwidth
+    a = bitwidth
+    layer_spec = [
+        {
+            "mw": 185,
+            "mh": 100,
+            "simd": 185,
+            "pe": 100,
+            "idt": DataType.BIPOLAR,
+            "wdt": w,
+            "act": a,
+        },
+        {"mw": 100, "mh": 100, "simd": 100, "pe": 100, "idt": a, "wdt": w, "act": a},
+        {"mw": 100, "mh": 100, "simd": 100, "pe": 100, "idt": a, "wdt": w, "act": a},
+        {"mw": 100, "mh": 100, "simd": 100, "pe": 100, "idt": a, "wdt": w, "act": a},
+        {
+            "mw": 100,
+            "mh": 1,
+            "simd": 100,
+            "pe": 1,
+            "idt": a,
+            "wdt": w,
+            "act": DataType.BIPOLAR,
+        },
+    ]
+
+    ret = create.hls_random_mlp_maker(layer_spec)
+    assert len(ret.graph.node) == 5
+    # ret.save("mlp-%s.onnx" % str(bitwidth))
diff --git a/tests/util/test_data_packing.py b/tests/util/test_data_packing.py
index 28f1d56d0dbc5451ccad3d36b4b1d4c6bed4f63e..7b77c4be20c1f41c11b53a9b65b79441c9bbbe47 100644
--- a/tests/util/test_data_packing.py
+++ b/tests/util/test_data_packing.py
@@ -47,6 +47,7 @@ from finn.util.data_packing import (
 
 @pytest.mark.parametrize("dtype", [DataType.BINARY, DataType.INT2, DataType.INT32])
 @pytest.mark.parametrize("test_shape", [(1, 2, 4), (1, 1, 64), (2, 64)])
+@pytest.mark.vivado
 def test_npy2apintstream(test_shape, dtype):
     ndarray = cutil.gen_finn_dt_tensor(dtype, test_shape)
     test_dir = cutil.make_build_dir(prefix="test_npy2apintstream_")