diff --git a/.coveragerc b/.coveragerc index f18e2e7e4fc546427500e673b350fb3609d59124..e3bc60d397a0653ccbc3a94647bc691f11d2acc3 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ # .coveragerc to control coverage.py [run] branch = True diff --git a/.gitignore b/.gitignore index c121bbd8e09a08d7f963ca4c5b0efd6f8faf1498..91879240b36709b5c827ec951366cc55ad515cce 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ # Temporary and binary files *~ *.py[cod] @@ -49,7 +77,9 @@ MANIFEST .venv*/ # Cloned dependencies for Docker -brevitas/ -brevitas_cnv_lfc/ -cnpy/ -finn-hlslib/ +/brevitas/ +/brevitas_cnv_lfc/ +/cnpy/ +/finn-hlslib/ +/pyverilator/ +/PYNQ-HelloWorld/ diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index 4a48dba8d2bcd070f0644cace52089cc21ab8b00..0000000000000000000000000000000000000000 --- a/.isort.cfg +++ /dev/null @@ -1,10 +0,0 @@ -[settings] -line_length=88 -indent=' ' -skip=.tox,.venv,build,dist -known_standard_library=setuptools,pkg_resources -known_test=pytest -known_first_party=finn -sections=FUTURE,STDLIB,COMPAT,TEST,THIRDPARTY,FIRSTPARTY,LOCALFOLDER -default_section=THIRDPARTY -multi_line_output=3 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b3a942a1f2996f8107e324199ace66101926a19a..8e4e82db3a046e454373c2f0b58d55865cda9c5b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + exclude: '^docs/conf.py' repos: @@ -23,8 +51,3 @@ repos: args: ['--fix=no'] - id: flake8 args: ['--max-line-length=88'] # default of Black - -- repo: https://github.com/pre-commit/mirrors-isort - rev: v4.3.4 - hooks: - - id: isort diff --git a/.travis.yml b/.travis.yml index 597b5b72814aca8ef0de18520730f9b0da828a7e..9319286ce43bf9249f2d3c4cdbf83df59894c7d5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # Travis configuration file using the build matrix feature # Read more under http://docs.travis-ci.com/user/build-configuration/ # THIS SCRIPT IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS! diff --git a/AUTHORS.rst b/AUTHORS.rst index 4b2194d1322981c471d6f77d994371e68ba519d0..a87cf170b065879f0c5e01e4726bc57608e5c4f2 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -2,4 +2,6 @@ Contributors ============ -* Yaman Umuroglu <yamanu@xilinx.com> +* Yaman Umuroglu +* Jakoba Petri-Koenig +* Andrea Rigoni diff --git a/Dockerfile b/Dockerfile index 3f33ae8d63c2d30f0b1aef2f2e933dcd153e9194..c220e6ac6f4f4b24f2a10af778a0740137ee949f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + FROM pytorch/pytorch:1.1.0-cuda10.0-cudnn7.5-devel MAINTAINER Yaman Umuroglu <yamanu@xilinx.com> ARG PYTHON_VERSION=3.6 @@ -11,6 +39,16 @@ RUN apt update; apt install nano RUN pip install jupyter RUN pip install netron RUN pip install matplotlib +RUN pip install pytest-dependency +RUN apt-get update +RUN apt-get -y upgrade +RUN apt-get install -y build-essential libglib2.0-0 libsm6 libxext6 libxrender-dev +RUN apt install verilator +RUN apt-get -y install sshpass +RUN echo "StrictHostKeyChecking no" >> /etc/ssh/ssh_config +RUN pip install sphinx +RUN pip install sphinx_rtd_theme + # Note that we expect the cloned finn directory on the host to be # mounted on /workspace/finn -- see run-docker.sh for an example @@ -20,13 +58,16 @@ RUN pip install matplotlib ENV PYTHONPATH "${PYTHONPATH}:/workspace/finn/src" ENV PYTHONPATH "${PYTHONPATH}:/workspace/brevitas_cnv_lfc/training_scripts" ENV PYTHONPATH "${PYTHONPATH}:/workspace/brevitas" - +ENV PYTHONPATH "${PYTHONPATH}:/workspace/pyverilator" +ENV PYNQSHELL_PATH "/workspace/PYNQ-HelloWorld/boards" ARG GID ARG GNAME ARG UNAME ARG UID ARG PASSWD +ARG JUPYTER_PORT +ARG NETRON_PORT RUN groupadd -g $GID $GNAME RUN useradd -M -u $UID $UNAME -g $GNAME 
@@ -37,4 +78,8 @@ RUN ln -s /workspace /home/$UNAME RUN chown -R $UNAME:$GNAME /home/$UNAME USER $UNAME +RUN echo "source \$VIVADO_PATH/settings64.sh" >> /home/$UNAME/.bashrc +RUN echo "PS1='\[\033[1;36m\]\u\[\033[1;31m\]@\[\033[1;32m\]\h:\[\033[1;35m\]\w\[\033[1;31m\]\$\[\033[0m\] '" >> /home/$UNAME/.bashrc +EXPOSE $JUPYTER_PORT +EXPOSE $NETRON_PORT WORKDIR /home/$UNAME/finn diff --git a/LICENSE.txt b/LICENSE.txt index 36b2c35b3b01a3ff618a757f4dae5c7ae18169ff..278564a5a4678868d831e7411dc17e5b8dbb2e8f 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright (c) 2019, Xilinx +Copyright (c) 2020, Xilinx All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md index 609995df32aacdc073ca2013019c512e944e467e..0a70f27b675c105d76259edcacb78251419a5205 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,62 @@ -# FINN +## <img src=https://raw.githubusercontent.com/Xilinx/finn/master/docs/img/finn-logo.png width=128/> Fast, Scalable Quantized Neural Network Inference on FPGAs -Fast, Scalable Quantized Neural Network Inference on FPGAs + + +<img align="left" src="https://raw.githubusercontent.com/Xilinx/finn/master/docs/img/finn-stack.png" alt="drawing" style="margin-right: 20px" width="250"/> [](https://gitter.im/xilinx-finn/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) +[](http://finn.readthedocs.io/) + +FINN is an experimental framework from Xilinx Research Labs to explore deep neural network +inference on FPGAs. +It specifically targets <a href="https://github.com/maltanar/qnn-inference-examples" target="_blank">quantized neural +networks</a>, with emphasis on +generating dataflow-style architectures customized for each network. +The resulting FPGA accelerators can yield very high classification rates, or conversely be run with a slow clock for very low power consumption. 
+The framework is fully open-source in order to give a higher degree of flexibility, and is intended to enable neural network research spanning several layers of the software/hardware abstraction stack. + +For more general information about FINN, please visit the [project page](https://xilinx.github.io/finn/), check out the [publications](https://xilinx.github.io/finn/publications) or some of the [demos](https://xilinx.github.io/finn/demos). + +## Getting Started + +Please see the [Getting Started](https://finn.readthedocs.io/en/latest/getting_started.html) page for more information on requirements, installation, and how to run FINN in different modes. Due to the complex nature of the dependencies of the project, we only support Docker-based deployment at this time. + +## What's New in FINN? + +* **2020-02-27:** FINN v0.2b (beta) is released, which is a clean-slate reimplementation of the framework. Currently only fully-connected networks are supported for the end-to-end flow. Please see the release blog post for a summary of the key features. + +## Documentation + +You can view the documentation on [readthedocs](https://finn.readthedocs.io) or build them locally using `python setup.py doc` from inside the Docker container. Additionally, there is a series of [Jupyter notebook tutorials](https://github.com/Xilinx/finn/tree/master/notebooks), which we recommend running from inside Docker for a better experience. + +## Community +We have a [gitter channel](https://gitter.im/xilinx-finn/community) where you can ask questions. You can use the GitHub issue tracker to report bugs, but please don't file issues to ask questions as this is better handled in the gitter channel. We also heartily welcome contributors to the project but do not yet have guidelines in place for this, so if you are interested just get in touch over gitter. -## Description +## Citation +The current implementation of the framework is based on the following publications. 
Please consider citing them if you find FINN useful. -FINN is an experimental framework from Xilinx Research Labs to explore deep neural network inference on FPGAs. It specifically targets quantized neural networks, with emphasis on generating dataflow-style architectures customized for each network. -For more information, please visit the [project page](https://xilinx.github.io/finn/). + @article{blott2018finn, + title={FINN-R: An end-to-end deep-learning framework for fast exploration of quantized neural networks}, + author={Blott, Michaela and Preu{\ss}er, Thomas B and Fraser, Nicholas J and Gambardella, Giulio and O’brien, Kenneth and Umuroglu, Yaman and Leeser, Miriam and Vissers, Kees}, + journal={ACM Transactions on Reconfigurable Technology and Systems (TRETS)}, + volume={11}, + number={3}, + pages={1--23}, + year={2018}, + publisher={ACM New York, NY, USA} + } -A new, more modular version of FINN is currently under development on GitHub, and we welcome contributions from the community! -Stay tuned for more updates. + @inproceedings{finn, + author = {Umuroglu, Yaman and Fraser, Nicholas J. 
and Gambardella, Giulio and Blott, Michaela and Leong, Philip and Jahre, Magnus and Vissers, Kees}, + title = {FINN: A Framework for Fast, Scalable Binarized Neural Network Inference}, + booktitle = {Proceedings of the 2017 ACM/SIGDA International Symposium on Field-Programmable Gate Arrays}, + series = {FPGA '17}, + year = {2017}, + pages = {65--74}, + publisher = {ACM} + } ## Old version diff --git a/docs/_layouts/default.html b/docs/_layouts/default.html index 8b17710446ff6c274bbb746e51387a55d3054c58..e594c6aaf0d77abb238f6244ab87c0412b7b2528 100644 --- a/docs/_layouts/default.html +++ b/docs/_layouts/default.html @@ -29,6 +29,7 @@ <li class="download"><a class="buttons" style="background: none" href="https://xilinx.github.io/finn/about">About</a></li> <li class="download"><a class="buttons" style="background: none" href="https://xilinx.github.io/finn/blog">Blog</a></li> <li class="download"><a class="buttons" style="background: none" href="https://xilinx.github.io/finn/demos">Demos</a></li> + <li class="download"><a class="buttons" style="background: none" href="https://finn.readthedocs.io">Documentation</a></li> <li class="download"><a class="buttons" style="background: none" href="https://github.com/Xilinx/brevitas">Brevitas</a></li> <li class="download"><a class="buttons" style="background: none" href="https://github.com/Xilinx/finn-hlslib">HLS Library</a></li> <li class="download"><a class="buttons" style="background: none" href="https://xilinx.github.io/finn/publications">Publications</a></li> diff --git a/docs/example-networks.md b/docs/example-networks.md index 3ec207b732b3650bab317c0c4d0c167dab305c63..060836ff95d89a752cc70426674cd429e74fdd06 100644 --- a/docs/example-networks.md +++ b/docs/example-networks.md @@ -1,10 +1,5 @@ # Status for FINN example networks -| | LFC-w1a1 | LFC-w1a2 | CNV-w1a1 | CNV-w1a2 | CNV-w2a2 | -|--------------------------- |---------- |---------- |---------- |---------- |---------- | -| Export/Import | x | x | x | | | -| 
Streamlining | x | x | | | | -| Convert to HLS layers | x | | | | | -| hlslib simulation | x | | | | | -| Monolithic HLS generation | | | | | | -| Hardware demo | | | | | | +This page has moved to: + +https://finn-dev.readthedocs.io/en/latest/example_networks.html diff --git a/docs/finn/Makefile b/docs/finn/Makefile index 12c4589fbdfe3a9298f8eac60910f798c5899789..d4bb2cbb9eddb1bb1b4f366623044af8e4830919 100644 --- a/docs/finn/Makefile +++ b/docs/finn/Makefile @@ -1,193 +1,20 @@ -# Makefile for Sphinx documentation +# Minimal makefile for Sphinx documentation # -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = ../build/sphinx/ -AUTODOCDIR = api -AUTODOCBUILD = sphinx-apidoc -PROJECT = FINN -MODULEDIR = ../src/finn - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext doc-requirements +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build +# Put it first so that "make" without argument is like "make help". 
help: - @echo "Please use \`make <target>' where <target> is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* $(AUTODOCDIR) - -$(AUTODOCDIR): $(MODULEDIR) - mkdir -p $@ - $(AUTODOCBUILD) -f -o $@ $^ - -doc-requirements: $(AUTODOCDIR) - -html: doc-requirements - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: doc-requirements - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: doc-requirements - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
- -pickle: doc-requirements - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: doc-requirements - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: doc-requirements - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: doc-requirements - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/$(PROJECT).qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/$(PROJECT).qhc" - -devhelp: doc-requirements - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $HOME/.local/share/devhelp/$(PROJECT)" - @echo "# ln -s $(BUILDDIR)/devhelp $HOME/.local/share/devhelp/$(PROJEC)" - @echo "# devhelp" - -epub: doc-requirements - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -patch-latex: - find _build/latex -iname "*.tex" | xargs -- \ - sed -i'' 's~includegraphics{~includegraphics\[keepaspectratio,max size={\\textwidth}{\\textheight}\]{~g' - -latex: doc-requirements - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - $(MAKE) patch-latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." 
- -latexpdf: doc-requirements - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - $(MAKE) patch-latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: doc-requirements - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: doc-requirements - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: doc-requirements - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: doc-requirements - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: doc-requirements - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: doc-requirements - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: doc-requirements - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: doc-requirements - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." 
- -doctest: doc-requirements - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -xml: doc-requirements - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." +.PHONY: help Makefile -pseudoxml: doc-requirements - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/finn/_static/.gitignore b/docs/finn/_static/.gitignore deleted file mode 100644 index 3c963632027b176c1146e6cca3b2d1aab781d766..0000000000000000000000000000000000000000 --- a/docs/finn/_static/.gitignore +++ /dev/null @@ -1 +0,0 @@ -# Empty directory diff --git a/docs/finn/authors.rst b/docs/finn/authors.rst deleted file mode 100644 index cd8e0913a8fa9ba3a03696eb3f0a03730d14a485..0000000000000000000000000000000000000000 --- a/docs/finn/authors.rst +++ /dev/null @@ -1,2 +0,0 @@ -.. _authors: -.. include:: ../AUTHORS.rst diff --git a/docs/finn/brevitas_export.rst b/docs/finn/brevitas_export.rst new file mode 100644 index 0000000000000000000000000000000000000000..443b692a2d05b48b2e395373411c3d5382825c6c --- /dev/null +++ b/docs/finn/brevitas_export.rst @@ -0,0 +1,17 @@ +.. _brevitas_export: + +*************** +Brevitas Export +*************** + +.. note:: **This website is currently under construction.** + +.. image:: /img/brevitas-export.png + :scale: 70% + :align: center + +FINN expects an ONNX model as input. This can be a model trained with `Brevitas <https://github.com/Xilinx/brevitas>`_. 
Brevitas is a PyTorch library for quantization-aware training and the FINN Docker image comes with several `example Brevitas networks <https://github.com/maltanar/brevitas_cnv_lfc>`_. Brevitas provides an export of a quantized network in ONNX representation. The resulting model consists only of `ONNX standard nodes <https://github.com/onnx/onnx/blob/master/docs/Operators.md>`_, but also contains additional attributes for the ONNX nodes to represent low precision datatypes. To work with the model it is wrapped into :ref:`modelwrapper` provided by FINN. + +At this stage we can already use the functional verification flow to simulate the model using Python, this is marked in the graphic with the dotted arrow. For more details please have look at :ref:`verification`. + +The model can now be further processed in FINN, the next flow step is :ref:`nw_prep`. diff --git a/docs/finn/changelog.rst b/docs/finn/changelog.rst deleted file mode 100644 index 871950df3f80fa9315dcec169c6af0a0a000f62e..0000000000000000000000000000000000000000 --- a/docs/finn/changelog.rst +++ /dev/null @@ -1,2 +0,0 @@ -.. _changes: -.. include:: ../CHANGELOG.rst diff --git a/docs/finn/conf.py b/docs/finn/conf.py index ba93a027cd73e8782ae541c43455bc180fd91dbb..1bd179c3f7904ba102f7a9b4f2edc2739ba58183 100644 --- a/docs/finn/conf.py +++ b/docs/finn/conf.py @@ -1,275 +1,55 @@ -# -*- coding: utf-8 -*- +# Configuration file for the Sphinx documentation builder. # -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import inspect -import os -import shutil -import sys +# This file only contains a selection of the most common options. 
For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html -__location__ = os.path.join( - os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe())) -) +# -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.join(__location__, "../src")) - -# -- Run sphinx-apidoc ------------------------------------------------------ -# This hack is necessary since RTD does not issue `sphinx-apidoc` before running -# `sphinx-build -b html . _build/html`. See Issue: -# https://github.com/rtfd/readthedocs.org/issues/1139 -# DON'T FORGET: Check the box "Install your project inside a virtualenv using -# setup.py install" in the RTD Advanced Settings. -# Additionally it helps us to avoid running apidoc manually - -try: # for Sphinx >= 1.7 - from sphinx.ext import apidoc -except ImportError: - from sphinx import apidoc - -output_dir = os.path.join(__location__, "api") -module_dir = os.path.join(__location__, "../src/finn") -try: - shutil.rmtree(output_dir) -except FileNotFoundError: - pass - -try: - import sphinx - from pkg_resources import parse_version +# +import os +import sys +sys.path.insert(0, os.path.abspath('../../src/')) - cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir}" - cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir) - args = cmd_line.split(" ") - if parse_version(sphinx.__version__) >= parse_version("1.7"): - args = args[1:] +# -- Project information ----------------------------------------------------- - apidoc.main(args) -except Exception as e: - print("Running `sphinx-apidoc` failed!\n{}".format(e)) +project = 'FINN' +copyright = '2020, Xilinx' +author = 'Y. Umuroglu and J. 
Petri-Koenig' -# -- General configuration ----------------------------------------------------- -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' +# -- General configuration --------------------------------------------------- -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.autosummary", - "sphinx.ext.viewcode", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.ifconfig", - "sphinx.ext.mathjax", - "sphinx.ext.napoleon", ] +extensions.append('sphinx.ext.autodoc') # Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix of source filenames. -source_suffix = ".rst" - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"FINN" -copyright = u"2019, Yaman Umuroglu" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = "" # Is set by calling `setup.py docs` -# The full version, including alpha/beta/rc tags. -release = "" # Is set by calling `setup.py docs` - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. 
-# today_fmt = '%B %d, %Y' +templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - - -# -- Options for HTML output --------------------------------------------------- +# -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = {"sidebar_width": "300px", "page_width": "1200px"} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". 
-try: - from finn import __version__ as version -except ImportError: - pass -else: - release = version - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = "" - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None +# +html_theme = 'sphinx_rtd_theme' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = "finn-doc" - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ("index", "user_guide.tex", u"FINN Documentation", u"Yaman Umuroglu", "manual") -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = "" - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. 
-# latex_domain_indices = True +html_static_path = ['_static'] -# -- External mapping ------------------------------------------------------------ -python_version = ".".join(map(str, sys.version_info[0:2])) -intersphinx_mapping = { - "sphinx": ("http://www.sphinx-doc.org/en/stable", None), - "python": ("https://docs.python.org/" + python_version, None), - "matplotlib": ("https://matplotlib.org", None), - "numpy": ("https://docs.scipy.org/doc/numpy", None), - "sklearn": ("http://scikit-learn.org/stable", None), - "pandas": ("http://pandas.pydata.org/pandas-docs/stable", None), - "scipy": ("https://docs.scipy.org/doc/scipy/reference", None), -} +master_doc = 'index' diff --git a/docs/finn/end_to_end_flow.rst b/docs/finn/end_to_end_flow.rst new file mode 100644 index 0000000000000000000000000000000000000000..1936aad186a96edbc6fe33dbccbe0c06826fab5f --- /dev/null +++ b/docs/finn/end_to_end_flow.rst @@ -0,0 +1,25 @@ +*************** +End-to-End Flow +*************** + +.. note:: **This website is currently under construction.** + +The following image shows an example end-to-end flow in FINN, starting from a trained PyTorch/Brevitas network and going all the way to a running FPGA accelerator. +As you can see in the picture, FINN has a high modularity and has the property that the flow can be stopped at any point and the intermediate result can be used for further processing or other purposes. This enables a wide range of users to benefit from FINN, even if they do not use the whole flow. + +.. image:: ../../notebooks/end2end_example/finn-design-flow-example.svg + :scale: 50% + :align: center + +The cylinder-like fields show the state of the network representation in the respective step. The rectangular fields represent the transformations that are applied to the network to achieve a certain result. The diagram is divided into five blocks, each of it includes several flow steps. 
The flow starts in top left corner with Brevitas export (pink block), followed by the preparation of the network (grey block) for the Vivado HLS and Vivado IPI (yellow block). There is also a section for testing and verification in software (green block) and the hardware generation and deployment on the PYNQ board (red block). + +This example flow is covered in the `end2end_example <https://github.com/Xilinx/finn/tree/master/notebooks/end2end_example>`_ Jupyter notebooks. +For a more detailed overview about the different flow sections, please have a look at the corresponding pages: + +.. toctree:: + + brevitas_export + nw_prep + vivado_synth + pynq_deploy + verification diff --git a/docs/finn/example_networks.rst b/docs/finn/example_networks.rst new file mode 100644 index 0000000000000000000000000000000000000000..e8b29a5fb862f6ebe917327a8f6c96edbf529b96 --- /dev/null +++ b/docs/finn/example_networks.rst @@ -0,0 +1,36 @@ +.. _example_networks: + +**************** +Example Networks +**************** + +FINN uses `several pre-trained QNNs <https://github.com/maltanar/brevitas_cnv_lfc>`_ that serve as examples and testcases. +You can find a status summary below for each network. + +* TFC, SFC, LFC... are fully-connected networks trained on the MNIST dataset +* CNV is a convolutional network trained on the CIFAR-10 dataset +* w\_a\_ refers to the quantization used for the weights (w) and activations (a) in bits + +The rows in the table are different steps of the FINN end-to-end flow. +If a particular network is supported for a particular step in the current FINN +version, this is indicated by an x mark in the table. 
+ ++-----------------------+------------+----------+----------+----------+----------+----------+ +| FINN step | Basic test | TFC-w1a1 | TFC-w1a2 | CNV-w1a1 | CNV-w1a2 | CNV-w2a2 | ++-----------------------+------------+----------+----------+----------+----------+----------+ +| Export/Import | x | x | x | x | | | ++-----------------------+------------+----------+----------+----------+----------+----------+ +| Streamlining | x | x | x | | | | ++-----------------------+------------+----------+----------+----------+----------+----------+ +| Convert to HLS layers | x | x | x | | | | ++-----------------------+------------+----------+----------+----------+----------+----------+ +| Stitched IP | x | x | x | | | | ++-----------------------+------------+----------+----------+----------+----------+----------+ +| Hardware test | x | x | x | | | | ++-----------------------+------------+----------+----------+----------+----------+----------+ +| npysim | x | x | x | | | | ++-----------------------+------------+----------+----------+----------+----------+----------+ +| rtlsim node-by-node | x | x | x | | | | ++-----------------------+------------+----------+----------+----------+----------+----------+ +| rtlsim stitched IP | x | x | x | | | | ++-----------------------+------------+----------+----------+----------+----------+----------+ diff --git a/docs/finn/genindex.rst b/docs/finn/genindex.rst new file mode 100644 index 0000000000000000000000000000000000000000..50a4d07fe5ecd9528d7d3d30f93193a6da650327 --- /dev/null +++ b/docs/finn/genindex.rst @@ -0,0 +1,5 @@ +.. This file is a placeholder and will be replaced + +***** +Index +***** diff --git a/docs/finn/getting_started.rst b/docs/finn/getting_started.rst new file mode 100644 index 0000000000000000000000000000000000000000..72ec0c32f4f797476097e61920c72077f6801ddd --- /dev/null +++ b/docs/finn/getting_started.rst @@ -0,0 +1,77 @@ +.. _getting_started: + +*************** +Getting Started +*************** + +.. 
note:: **This website is currently under construction.** + +How to use the FINN compiler +============================ +The FINN compiler should not be thought of as a single pushbutton tool that does everything for you, but rather as a collection of scripts/tools that will help you convert a QNN into a custom FPGA accelerator that performs high-performance inference. We do provide several examples of taking trained networks all the way down to FPGA bitfiles, but if you are trying to do this for custom networks you will have to write your own Python scripts that call the appropriate FINN Compiler functions that process your design correctly, or add new functions as required. + +Requirements +============ + +* Ubuntu 18.04 +* Docker +* A working Vivado 2019.1 installation +* A `VIVADO_PATH` environment variable pointing to the Vivado installation directory (e.g. the directory where settings64.sh is located) +* (optional) A PYNQ board with a network connection + +Running FINN in Docker +====================== +We use Docker extensively for developing and deploying FINN. If you are not familiar with Docker, there are many excellent `online resources <https://docker-curriculum.com/>`_ to get started. There is a Dockerfile in the root of the repository, as well as a `run-docker.sh` script that can be launched in the following modes: + +Getting an interactive shell for development or experimentation +*************************************************************** +:: + + sh run-docker.sh + +Simply running sh run-docker.sh without any additional arguments will clone the dependency repos, create a Docker container and give you a terminal which you can use for development or experimentation. + +.. warning:: The Docker container is spawned with the `--rm` option, so make sure that any important files you created inside the container are either in the /workspace/finn folder (which is mounted from the host computer) or otherwise backed up. + +.. 
note:: **Develop from host, run inside container:** The FINN repository directory will be mounted from the host, so that you can use a text editor on your host computer to develop and the changes will be reflected directly inside the container. + +Running the Jupyter notebooks +***************************** +:: + + sh run-docker.sh notebook + +This will launch the `Jupyter notebook <https://jupyter.org/>`_ server inside a Docker container, and print a link on the terminal that you can open in your browser to run the FINN notebooks or create new ones. +.. note:: The link will look something like this (the token you get will be different): +http://127.0.0.1:8888/?token=f5c6bd32ae93ec103a88152214baedff4ce1850d81065bfc + +The run-docker.sh script forwards ports 8888 for Jupyter and 8081 for Netron, and launches the notebook server with appropriate arguments. + +Running the test suite directly +******************************* +:: + + sh run-docker.sh test + +FINN comes with a set of tests which can be launched using the command above. Note that some of the tests involve extra compilation and the entire test suite may take some time to complete. + +Running the test suite using Jenkins +************************************ +:: + + sh run-docker.sh jenkins + +This will launch `Jenkins <https://jenkins.io/>`_ inside a Docker container and print an initial password for the user to use together with the username "admin" to open Jenkins in the webbrowser. The script forwards port 8080 for Jenkins and also configures a smee client to access port 8080. `Smee <https://smee.io/>`_ is a webhook payload delivery service and the FINN GitHub repository has a webhook set up to trigger the smee client (that is set in the run_docker script) when a push event is happening. Through Jenkins the user can set up a test for FINN, which is started at every push event. 
+ +Environment variables +********************** + +Prior to running the `run-docker.sh` script, there are several environment variables you can set to configure certain aspects of FINN. +These are summarized below: + +* `VIVADO_PATH` points to your Vivado installation on the host +* `JUPYTER_PORT` (default 8888) changes the port for Jupyter inside Docker +* `NETRON_PORT` (default 8081) changes the port for Netron inside Docker +* `PYNQ_BOARD` specifies the type of PYNQ board used (Pynq-Z1, Pynq-Z2, Ultra96, ZCU104) for the test suite +* `PYNQ_USERNAME` and `PYNQ_PASSWORD` specify the PYNQ board access credentials for the test suite +* `PYNQ_TARGET_DIR` specifies the target dir on the PYNQ board for the test suite diff --git a/docs/finn/img/brevitas-export.png b/docs/finn/img/brevitas-export.png new file mode 100755 index 0000000000000000000000000000000000000000..94c05ebc85e7893f4dcecfc14f3d3d7a230c86a2 Binary files /dev/null and b/docs/finn/img/brevitas-export.png differ diff --git a/docs/finn/img/nw-prep.png b/docs/finn/img/nw-prep.png new file mode 100755 index 0000000000000000000000000000000000000000..8b31b14bdfd5c6f042aabde9c4f5fcbc5f513fd5 Binary files /dev/null and b/docs/finn/img/nw-prep.png differ diff --git a/docs/finn/img/pynq-deploy.png b/docs/finn/img/pynq-deploy.png new file mode 100755 index 0000000000000000000000000000000000000000..ba8e889c040545ffdb0daf0a6cc01a2eef423db8 Binary files /dev/null and b/docs/finn/img/pynq-deploy.png differ diff --git a/docs/finn/img/verification.png b/docs/finn/img/verification.png new file mode 100755 index 0000000000000000000000000000000000000000..71645fef72496258544641bb09e7b529656b8812 Binary files /dev/null and b/docs/finn/img/verification.png differ diff --git a/docs/finn/img/vivado-synth.png b/docs/finn/img/vivado-synth.png new file mode 100755 index 0000000000000000000000000000000000000000..b585910692f225ca8c4717ad0bab623534be0b4c Binary files /dev/null and b/docs/finn/img/vivado-synth.png differ diff --git 
a/docs/finn/index.rst b/docs/finn/index.rst index 66f8d97118ae762fb84bcb72c9a81c7cce2a5ba4..4a9452c7274c81dae9460eb17638638dab6963cb 100644 --- a/docs/finn/index.rst +++ b/docs/finn/index.rst @@ -1,59 +1,42 @@ -==== +.. finn documentation master file, created by + sphinx-quickstart on Mon Feb 24 14:55:45 2020. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +**** FINN -==== +**** +.. note:: **This website is currently under construction.** + +Welcome to the FINN Read the Docs website. This website is about the new, more modular version of FINN, which is currently under development on GitHub, and we welcome contributions from the community! Stay tuned for more updates. -This is the documentation of **FINN**. +What is FINN? +============= +'FINN' is colloquially used to refer to two separate but highly related things: -.. note:: +* The FINN project, which is an experimental framework from Xilinx Research Labs to explore deep neural network inference on FPGAs. It specifically targets quantized neural networks, with emphasis on generating dataflow-style architectures customized for each network. It includes tools for training quantized neural networks such as Brevitas, the FINN compiler, and the finn-hlslib Vivado HLS library of FPGA components for QNNs. An overview of the project can be taken from the following graphic and details can be seen on the `FINN project homepage <https://xilinx.github.io/finn/>`_. - This is the main page of your project's `Sphinx`_ documentation. - It is formatted in `reStructuredText`_. Add additional pages - by creating rst-files in ``docs`` and adding them to the `toctree`_ below. - Use then `references`_ in order to link them from this page, e.g. - :ref:`authors` and :ref:`changes`. +.. image:: ../img/finn-stack.png + :scale: 40% + :align: center - It is also possible to refer to the documentation of other Python packages - with the `Python domain syntax`_. 
By default you can reference the - documentation of `Sphinx`_, `Python`_, `NumPy`_, `SciPy`_, `matplotlib`_, - `Pandas`_, `Scikit-Learn`_. You can add more by extending the - ``intersphinx_mapping`` in your Sphinx's ``conf.py``. +* The repository, this Read the Docs website corresponds to. It is about the FINN compiler, which is the centerpiece of the FINN project. Details can be looked up directly in the `FINN GitHub repository <https://github.com/Xilinx/finn>`_. To learn more about the FINN compiler, use this website and for a hands-on experience the repository contains some Jupyter notebooks which can be found under this `link <https://github.com/Xilinx/finn/tree/dev/notebooks>`_. - The pretty useful extension `autodoc`_ is activated by default and lets - you include documentation from docstrings. Docstrings can be written in - `Google style`_ (recommended!), `NumPy style`_ and `classical style`_. +More FINN Resources +=================== +* `List of publications <https://github.com/Xilinx/finn/blob/master/docs/publications.md>`_ -Contents -======== +* `Roadmap <https://github.com/Xilinx/finn/projects/1>`_ .. toctree:: - :maxdepth: 2 - - License <license> - Authors <authors> - Changelog <changelog> - Module Reference <api/modules> - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - -.. _toctree: http://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html -.. _reStructuredText: http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html -.. _references: http://www.sphinx-doc.org/en/stable/markup/inline.html -.. _Python domain syntax: http://sphinx-doc.org/domains.html#the-python-domain -.. _Sphinx: http://www.sphinx-doc.org/ -.. _Python: http://docs.python.org/ -.. _Numpy: http://docs.scipy.org/doc/numpy -.. _SciPy: http://docs.scipy.org/doc/scipy/reference/ -.. _matplotlib: https://matplotlib.org/contents.html# -.. _Pandas: http://pandas.pydata.org/pandas-docs/stable -.. 
_Scikit-Learn: http://scikit-learn.org/stable -.. _autodoc: http://www.sphinx-doc.org/en/stable/ext/autodoc.html -.. _Google style: https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings -.. _NumPy style: https://numpydoc.readthedocs.io/en/latest/format.html -.. _classical style: http://www.sphinx-doc.org/en/stable/domains.html#info-field-lists + :maxdepth: 5 + :hidden: + + getting_started + tutorials + end_to_end_flow + example_networks + internals + source_code/finn + genindex diff --git a/docs/finn/internals.rst b/docs/finn/internals.rst new file mode 100644 index 0000000000000000000000000000000000000000..1e4ea189db614e2ec9d0713ed3d63696b705762d --- /dev/null +++ b/docs/finn/internals.rst @@ -0,0 +1,140 @@ +********* +Internals +********* + +.. note:: **This website is currently under construction.** + +Intermediate Representation: FINN-ONNX +====================================== + +FINN uses `ONNX <https://github.com/onnx/onnx>`_ as an intermediate representation (IR) for neural networks. As such, almost every component inside FINN uses ONNX and its `Python API <https://github.com/onnx/onnx/blob/master/docs/PythonAPIOverview.md>`_, so you may want to familiarize yourself with how ONNX represents DNNs. Specifically, the `ONNX protobuf description <https://github.com/onnx/onnx/blob/master/onnx/onnx.proto>`_ (or its `human-readable documentation <https://github.com/onnx/onnx/blob/master/docs/IR.md>`_) and the `operator schemas <https://github.com/onnx/onnx/blob/master/docs/Operators.md>`_ are useful as reference documents. We also provide a Jupyter notebook that can help to get familiar with ONNX by showing how to work with a simple ONNX model in FINN, see chapter :ref:`tutorials` for details. + +.. note:: FINN uses ONNX in a specific way that we refer to as FINN-ONNX, and not all ONNX graphs are supported by FINN (and vice versa). 
+ +Custom Quantization Annotations +=============================== + +ONNX does not support datatypes smaller than 8-bit integers, whereas in FINN we are interested in smaller integers down to ternary and bipolar. To make this work, FINN uses the quantization_annotation field in ONNX to annotate tensors with their FINN DataType (:py:mod:`finn.core.datatype.DataType`) information. However, all tensors are expected to use single-precision floating point (float32) storage in FINN. This means we store even a 1-bit value as floating point for the purposes of representation. The FINN compiler flow is responsible for eventually producing a packed representation for the target hardware, where the 1-bit is actually stored as 1-bit. + +Custom Operations/Nodes +======================= + +FINN uses many custom operations (op_type in ONNX NodeProto) that are not defined in the ONNX operator schema. These custom nodes are marked with domain="finn" in the protobuf to identify them as such. These nodes can represent specific operations that we need for low-bit networks, or operations that are specific to a particular hardware backend. To get more familiar with custom operations and how they are created, please take a look in the Jupyter notebook about CustomOps (see chapter :ref:`tutorials` for details) or directly in the module :py:mod:`finn.custom_op`. + +Custom ONNX Execution Flow +========================== + +To verify correct operation of FINN-ONNX graphs, FINN provides its own ONNX execution flow (:py:mod:`finn.core.onnx_exec`). This flow supports the standard set of ONNX operations as well as the custom FINN operations. + +.. warning:: This execution flow is only meant for checking the correctness of models after applying transformations, and not for high performance inference. + +.. 
_modelwrapper: + +ModelWrapper +============ + +FINN provides a ModelWrapper class (:py:mod:`finn.core.modelwrapper.ModelWrapper`) as a thin wrapper around ONNX to make it easier to analyze and manipulate ONNX graphs. This wrapper provides many helper functions, while still giving full access to the ONNX protobuf representation. + +Some of the helper functions are described in more detail below. + +Create a ModelWrapper instance +------------------------------ +The ModelWrapper instance can be created using a model in .onnx format or by directly passing a ModelProto instance to the wrapper. The code block below gives an example of how to use the wrapper on a model in .onnx format. +:: + + from finn.core.modelwrapper import ModelWrapper + model = ModelWrapper("model.onnx") + +Access the ONNX GraphProto through ModelWrapper +----------------------------------------------- +The ONNX ModelProto can be accessed with following command: +:: + + modelproto = model.model + +The graph can be accessed using: +:: + + graphproto = model.graph + +The node list is accessed by: +:: + + nodes = model.graph.node + +The individual nodes can be selected via their indices. +:: + + # first node + nodes[0] + +The number of all nodes can be determined with the len() function in Python. +:: + + # number of nodes in the graph + len(nodes) + +Helper functions for tensors +---------------------------- + +A list of all tensors (names) can easily be accessed using: +:: + + tensor_list = model.get_all_tensor_names() + +If we take a single tensor from that list (by index), we can determine their producer or consumer node by using one of the following functions. Note that it may be that a tensor does not have a producer or consumer node, for example if the tensor represents a constant that is already set. In that case `None` will be returned. 
+:: + + # find producer of third tensor in model tensor list + model.find_producer(tensor_list[2]) + + # find consumer of third tensor in model tensor list + model.find_consumer(tensor_list[2]) + +Every tensor has a specific shape, to get or to set this shape these functions can be used: +:: + + # get tensor shape of third tensor in model tensor list + model.get_tensor_shape(tensor_list[2]) + + # set tensor shape of third tensor in model tensor list + tensor_shape = [1, 1, 28, 28] + model.set_tensor_shape(tensor_list[2], tensor_shape) + +Optionally, the dtype (container datatype) of the tensor can also be specified as third argument in the set function. By default it is set to TensorProto.FLOAT. + +As mentioned above there are FINN DataTypes additional to the container datatype, these can be accessed and set for a tensor with the following functions: +:: + + # get tensor dataype of third tensor in model tensor list + model.get_tensor_datatype(tensor_list[2]) + + # set tensor datatype of third tensor in model tensor list + from finn.core.datatype import DataType + + finn_dtype = DataType.BIPOLAR + model.set_tensor_datatype(tensor_list[2], finn_dtype) + +ModelWrapper contains two helper functions for tensor initializers, one to determine the current initializer and one to set the initializer of a tensor. If there is no initializer, None is returned. +:: + + # get tensor initializer of third tensor in model tensor list + model.get_initializer(tensor_list[2]) + +ModelWrapper contains more useful functions, if you are interested please have a look at the ModelWrapper module (:py:mod:`finn.core.modelwrapper.ModelWrapper`) directly. + + +.. _analysis_pass: + +Analysis Pass +============= + +An analysis pass traverses the graph structure and produces information about certain properties. It gets the model in the ModelWrapper as input and returns a dictionary of the properties the analysis extracts. 
If you are interested in how to write an analysis pass for FINN, please take a look at the Jupyter notebook about how to write an analysis pass, see chapter :ref:`tutorials` for details. For more information about existing analysis passes in FINN, see module :py:mod:`finn.analysis`. + +.. _transformation_pass: + +Transformation Pass +=================== + +A transformation pass changes (transforms) the given model; it gets the model in the ModelWrapper as input and returns the changed model (ModelWrapper) to the FINN flow. Additionally, the flag *model_was_changed*, which indicates if a transformation has to be performed more than once, is returned. If you are interested in how to write a transformation pass for FINN, please take a look at the Jupyter notebook about how to write a transformation pass, see chapter :ref:`tutorials` for details. For more information about existing transformation passes in FINN, see module :py:mod:`finn.transformation`. + diff --git a/docs/finn/license.rst b/docs/finn/license.rst deleted file mode 100644 index 3989c5130411f77da5198a165361ea1b8b2fc1de..0000000000000000000000000000000000000000 --- a/docs/finn/license.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. _license: - -======= -License -======= - -.. include:: ../LICENSE.txt diff --git a/docs/finn/make.bat b/docs/finn/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..2119f51099bf37e4fdb6071dce9f451ea44c62dd --- /dev/null +++ b/docs/finn/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. 
Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/finn/nw_prep.rst b/docs/finn/nw_prep.rst new file mode 100644 index 0000000000000000000000000000000000000000..96dfcc184261b5e24a4e7c51eef74fff55d0d841 --- /dev/null +++ b/docs/finn/nw_prep.rst @@ -0,0 +1,52 @@ +.. _nw_prep: + +******************* +Network Preparation +******************* + +.. note:: **This website is currently under construction.** + +.. image:: /img/nw-prep.png + :scale: 70% + :align: center + +The main principle of FINN are analysis and transformation passes. If you like to have more information about these please have a look at section :ref:`analysis_pass` and :ref:`transformation_pass` or at chapter :ref:`tutorials` about the provided Jupyter notebooks. + +This page is about the network preparation, the flow step that comes after the :ref:`brevitas_export`. Its main idea is to optimize the network and convert the nodes to custom nodes that correspond to `finn-hlslib <https://github.com/Xilinx/finn-hlslib>`_ functions. In this way we get a network that we can bring to hardware with the help of Vivado. For that we have to apply several transformations on the ONNX model, which this flow step receives wrapped in the :ref:`modelwrapper`. + +Various transformations are involved in the network preparation. The following is a short overview of these. + +Tidy-up transformations +======================= + +These transformations do not appear in the diagram above, but are applied in many steps in the FINN flow to postprocess the model after a transformation and/or prepare it for the next transformation. They ensure that all information is set and behave like a "tidy-up". 
These transformations are the following: + +* :py:mod:`finn.transformation.general.GiveReadableTensorNames` and :py:mod:`finn.transformation.general.GiveUniqueNodeNames` + +* :py:mod:`finn.transformation.infer_datatypes.InferDataTypes` and :py:mod:`finn.transformation.infer_shapes.InferShapes` + +* :py:mod:`finn.transformation.fold_constants.FoldConstants` + +Streamlining Transformations +============================ + +The idea behind streamlining is to eliminate floating point operations in a model by moving them around, collapsing them into one operation and transforming them into multithresholding nodes. Several transformations are involved in this step. For details have a look at the module :py:mod:`finn.transformation.streamline` and for more information on the theoretical background of this, see `this paper <https://arxiv.org/pdf/1709.04060.pdf>`_. + +After this transformation the ONNX model is streamlined and contains now custom nodes in addition to the standard nodes. At this point we can use the :ref:`verification` to simulate the model using Python and in the next step some of the nodes can be converted into HLS layers that correspond to finn_hlslib functions. + +Convert to HLS Layers +===================== + +Pairs of binary XNORPopcountMatMul layers are converted to StreamingFCLayers and following Multithreshold layers are absorbed into the MVTU. The result is a model consisting of a mixture of HLS and non-HLS layers. For more details, see :py:mod:`finn.transformation.fpgadataflow.convert_to_hls_layers`. + +Dataflow Partitioning +===================== + +In the next step the graph is split and the part consisting of HLS layers is further processed in the FINN flow. The parent graph containing the non-HLS layers remains. The PE and SIMD are set to 1 by default, so the result is a network of only HLS layers with maximum folding. The model can be verified using the *npysim* simulation. 
It is a simulation using C++ and is described in more detail in chapter :ref:`verification`. + +Folding +======= + +To adjust the folding, the values for PE and SIMD can be increased to achieve also an increase in the performance. The result can be verified using the same simulation flow as for the network with maximum folding (*npysim* using C++), for details please have a look at chapter :ref:`verification`. + +The result is a network of HLS layers with desired folding and it can be passed to :ref:`vivado_synth`. diff --git a/docs/finn/pynq_deploy.rst b/docs/finn/pynq_deploy.rst new file mode 100644 index 0000000000000000000000000000000000000000..70bd6d72194a255323c4ce13337789c37da5482a --- /dev/null +++ b/docs/finn/pynq_deploy.rst @@ -0,0 +1,36 @@ +.. _pynq_deploy: + +*************** +PYNQ Deployment +*************** + +.. note:: **This website is currently under construction.** + +.. image:: /img/pynq-deploy.png + :scale: 70% + :align: center + +This chapter is about the hardware generation and deployment on PYNQ. If you need more information about PYNQ, please have a look at the `PYNQ website <https://pynq.readthedocs.io/en/v2.5.1/>`_. + +Create PYNQ Shell Project +========================= + +To deploy the network on A PYNQ platform, it needs to be put inside an appropriate *shell*. This *shell* bridges the network with the interfaces the underlying system exposes. This can be done using the transformation MakePYNQProject, see :py:mod:`finn.transformation.fpgadataflow.make_pynq_proj.MakePYNQProject`. + +Test on Hardware +================ + +Synthesis, Place and Route +-------------------------- + +After integrating the model into the PYNQ shell, Vivado *Synthesis, Place and Route* can be launched. The result is a bitfile which can be used for the PYNQ board. In FINN this can be done using a transformation pass. For details, please have a look at :py:mod:`finn.transformation.fpgadataflow.synth_pynq_proj.SynthPYNQProject`. 
+ +Generate PYNQ runtime code +-------------------------- + +Additionally, a Python code is necessary to execute the model on the board. This is done by transformation pass :py:mod:`finn.transformation.fpgadataflow.make_pynq_driver.MakePYNQDriver`. + +Deployment and Remote Execution +------------------------------- + +The bitfile and the driver file(s) are copied to the PYNQ board and can be executed there using the *onnx_exec* function with the right *exec_mode* settings. For details please have a look at transformation :py:mod:`finn.transformation.fpgadataflow.make_deployment.DeployToPYNQ` and the execution function :py:mod:`finn.core.onnx_exec`. diff --git a/docs/finn/source_code/finn.analysis.fpgadataflow.rst b/docs/finn/source_code/finn.analysis.fpgadataflow.rst new file mode 100644 index 0000000000000000000000000000000000000000..e31723ca3802e1ba523131ddff8078662c06d54b --- /dev/null +++ b/docs/finn/source_code/finn.analysis.fpgadataflow.rst @@ -0,0 +1,23 @@ +*********************** +Analysis - fpgadataflow +*********************** + +Analysis Passes (fpgadataflow) +============================== + +finn.analysis.fpgadataflow.hls\_synth\_res\_estimation +------------------------------------------------------ + +.. automodule:: finn.analysis.fpgadataflow.hls_synth_res_estimation + :members: + :undoc-members: + :show-inheritance: + +finn.analysis.fpgadataflow.res\_estimation +------------------------------------------ + +.. 
automodule:: finn.analysis.fpgadataflow.res_estimation + :members: + :undoc-members: + :show-inheritance: + diff --git a/docs/finn/source_code/finn.analysis.rst b/docs/finn/source_code/finn.analysis.rst new file mode 100644 index 0000000000000000000000000000000000000000..dbfeea2a9658957b2525016c387b7bc6aeb77608 --- /dev/null +++ b/docs/finn/source_code/finn.analysis.rst @@ -0,0 +1,34 @@ +******** +Analysis +******** + +If you want to know more details about analysis passes, please take a look at section ":ref:`analysis_pass`" in chapter *Internals*. + +Submodules +========== + +.. toctree:: + :maxdepth: 2 + + finn.analysis.fpgadataflow + +Analysis Passes +=============== + +finn.analysis.topology +---------------------- + +.. automodule:: finn.analysis.topology + :members: + :undoc-members: + :show-inheritance: + +finn.analysis.verify\_custom\_nodes +----------------------------------- + +.. automodule:: finn.analysis.verify_custom_nodes + :members: + :undoc-members: + :show-inheritance: + + diff --git a/docs/finn/source_code/finn.core.rst b/docs/finn/source_code/finn.core.rst new file mode 100644 index 0000000000000000000000000000000000000000..78b461ee69efb6cac59eb4e9c1dbd5abc521191d --- /dev/null +++ b/docs/finn/source_code/finn.core.rst @@ -0,0 +1,57 @@ +**** +Core +**** + +Modules +======= + +finn.core.datatype +------------------ + +.. automodule:: finn.core.datatype + :members: + :undoc-members: + :show-inheritance: + +finn.core.execute\_custom\_node +------------------------------- + +.. automodule:: finn.core.execute_custom_node + :members: + :undoc-members: + :show-inheritance: + +finn.core.modelwrapper +---------------------- + +If you want to have more details about the module ModelWrapper, please have a look at section ":ref:`modelwrapper`" in chapter *Internals*. + +.. automodule:: finn.core.modelwrapper + :members: + :undoc-members: + :show-inheritance: + +finn.core.onnx\_exec +-------------------- + +.. 
automodule:: finn.core.onnx_exec + :members: + :undoc-members: + :show-inheritance: + +finn.core.remote\_exec +---------------------- + +.. automodule:: finn.core.remote_exec + :members: + :undoc-members: + :show-inheritance: + +finn.core.rtlsim\_exec +---------------------- + +.. automodule:: finn.core.rtlsim_exec + :members: + :undoc-members: + :show-inheritance: + diff --git a/docs/finn/source_code/finn.custom_op.fpgadataflow.rst b/docs/finn/source_code/finn.custom_op.fpgadataflow.rst new file mode 100644 index 0000000000000000000000000000000000000000..f8c6274e40126a12f072b9c46c9e5748747f8121 --- /dev/null +++ b/docs/finn/source_code/finn.custom_op.fpgadataflow.rst @@ -0,0 +1,56 @@ +************************ +Custom Op - FPGADataFlow +************************ + +HLS Custom Op Nodes +=================== + +Base Class +---------- + +.. automodule:: finn.custom_op.fpgadataflow + :members: + :undoc-members: + :show-inheritance: + + +finn.custom\_op.fpgadataflow.convolutioninputgenerator +------------------------------------------------------ + +.. automodule:: finn.custom_op.fpgadataflow.convolutioninputgenerator + :members: + :undoc-members: + :show-inheritance: + +finn.custom\_op.fpgadataflow.streamingfclayer\_batch +---------------------------------------------------- + +.. automodule:: finn.custom_op.fpgadataflow.streamingfclayer_batch + :members: + :undoc-members: + :show-inheritance: + +finn.custom\_op.fpgadataflow.streamingmaxpool\_batch +---------------------------------------------------- + +.. automodule:: finn.custom_op.fpgadataflow.streamingmaxpool_batch + :members: + :undoc-members: + :show-inheritance: + +finn.custom\_op.fpgadataflow.templates +-------------------------------------- + +.. automodule:: finn.custom_op.fpgadataflow.templates + :members: + :undoc-members: + :show-inheritance: + +finn.custom\_op.fpgadataflow.tlastmarker +---------------------------------------- + +.. 
automodule:: finn.custom_op.fpgadataflow.tlastmarker + :members: + :undoc-members: + :show-inheritance: + diff --git a/docs/finn/source_code/finn.custom_op.rst b/docs/finn/source_code/finn.custom_op.rst new file mode 100644 index 0000000000000000000000000000000000000000..e0a2c77213be8bd5eca4dce67d48c6b5950be9ba --- /dev/null +++ b/docs/finn/source_code/finn.custom_op.rst @@ -0,0 +1,55 @@ +********* +Custom Op +********* + +Submodules +========== + +.. toctree:: + :maxdepth: 2 + + finn.custom_op.fpgadataflow + +Custom Op Nodes +=============== + +Base Class +---------- + +.. automodule:: finn.custom_op + :members: + :undoc-members: + :show-inheritance: + +finn.custom\_op.multithreshold +------------------------------ + +.. automodule:: finn.custom_op.multithreshold + :members: + :undoc-members: + :show-inheritance: + +finn.custom\_op.registry +------------------------ + +.. automodule:: finn.custom_op.registry + :members: + :undoc-members: + :show-inheritance: + +finn.custom\_op.streamingdataflowpartition +------------------------------------------ + +.. automodule:: finn.custom_op.streamingdataflowpartition + :members: + :undoc-members: + :show-inheritance: + +finn.custom\_op.xnorpopcount +---------------------------- + +.. automodule:: finn.custom_op.xnorpopcount + :members: + :undoc-members: + :show-inheritance: + diff --git a/docs/finn/source_code/finn.rst b/docs/finn/source_code/finn.rst new file mode 100644 index 0000000000000000000000000000000000000000..1197c50a035b77ec24f51f9e95f6208db162db8e --- /dev/null +++ b/docs/finn/source_code/finn.rst @@ -0,0 +1,16 @@ +******** +FINN API +******** +The FINN sources are divided into different modules. They are listed below. + +Modules +======= + +.. 
toctree:: + :maxdepth: 1 + + finn.analysis + finn.core + finn.custom_op + finn.transformation + finn.util diff --git a/docs/finn/source_code/finn.transformation.fpgadataflow.rst b/docs/finn/source_code/finn.transformation.fpgadataflow.rst new file mode 100644 index 0000000000000000000000000000000000000000..ba850d8fd3834ca86769925ddcf9cc958cdb4980 --- /dev/null +++ b/docs/finn/source_code/finn.transformation.fpgadataflow.rst @@ -0,0 +1,134 @@ +***************************** +Transformation - fpgadataflow +***************************** + +Transformations (fpgadataflow) +============================== + +finn.transformation.fpgadataflow.cleanup +---------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.cleanup + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.codegen\_ipgen +----------------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.codegen_ipgen + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.codegen\_ipstitch +-------------------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.codegen_ipstitch + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.codegen\_npysim +------------------------------------------------ + +.. automodule:: finn.transformation.fpgadataflow.codegen_npysim + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.compile +---------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.compile + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.convert\_to\_hls\_layers +--------------------------------------------------------- + +.. 
automodule:: finn.transformation.fpgadataflow.convert_to_hls_layers + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.create\_dataflow\_partition +------------------------------------------------------------ + +.. automodule:: finn.transformation.fpgadataflow.create_dataflow_partition + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.hlssynth\_ipgen +------------------------------------------------ + +.. automodule:: finn.transformation.fpgadataflow.hlssynth_ipgen + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.insert\_tlastmarker +---------------------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.insert_tlastmarker + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.make\_deployment +------------------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.make_deployment + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.make\_pynq\_driver +--------------------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.make_pynq_driver + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.make\_pynq\_proj +------------------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.make_pynq_proj + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.replace\_verilog\_relpaths +----------------------------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.replace_verilog_relpaths + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.set\_exec\_mode +------------------------------------------------ + +.. 
automodule:: finn.transformation.fpgadataflow.set_exec_mode + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.synth\_pynq\_proj +-------------------------------------------------- + +.. automodule:: finn.transformation.fpgadataflow.synth_pynq_proj + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fpgadataflow.templates +------------------------------------------ + +.. automodule:: finn.transformation.fpgadataflow.templates + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/finn/source_code/finn.transformation.rst b/docs/finn/source_code/finn.transformation.rst new file mode 100644 index 0000000000000000000000000000000000000000..c944cb478303719bfd3392567b2b07f65c40edd0 --- /dev/null +++ b/docs/finn/source_code/finn.transformation.rst @@ -0,0 +1,65 @@ +************** +Transformation +************** + +If you want to know more details about transformation passes, please take a look at section ":ref:`transformation_pass`" in chapter *Internals*. + +Submodules +========== + +.. toctree:: + :maxdepth: 2 + + finn.transformation.fpgadataflow + finn.transformation.streamline + +Transformation Passes +===================== + +finn.transformation.batchnorm\_to\_affine +----------------------------------------- + +.. automodule:: finn.transformation.batchnorm_to_affine + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.bipolar\_to\_xnor +------------------------------------- + +.. automodule:: finn.transformation.bipolar_to_xnor + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.fold\_constants +----------------------------------- + +.. automodule:: finn.transformation.fold_constants + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.general +--------------------------- + +.. 
automodule:: finn.transformation.general + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.infer\_datatypes +------------------------------------ + +.. automodule:: finn.transformation.infer_datatypes + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.infer\_shapes +--------------------------------- + +.. automodule:: finn.transformation.infer_shapes + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/finn/source_code/finn.transformation.streamline.rst b/docs/finn/source_code/finn.transformation.streamline.rst new file mode 100644 index 0000000000000000000000000000000000000000..56d7f827213381edd260ff43eb36e32394c6d03d --- /dev/null +++ b/docs/finn/source_code/finn.transformation.streamline.rst @@ -0,0 +1,52 @@ +*************************** +Transformation - Streamline +*************************** + +Transformation (Streamline) +=========================== + +.. automodule:: finn.transformation.streamline + :members: + :undoc-members: + :show-inheritance: + + +finn.transformation.streamline.absorb +------------------------------------- + +.. automodule:: finn.transformation.streamline.absorb + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.streamline.collapse\_repeated +------------------------------------------------- + +.. automodule:: finn.transformation.streamline.collapse_repeated + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.streamline.reorder +-------------------------------------- + +.. automodule:: finn.transformation.streamline.reorder + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.streamline.round\_thresholds +------------------------------------------------ + +.. automodule:: finn.transformation.streamline.round_thresholds + :members: + :undoc-members: + :show-inheritance: + +finn.transformation.streamline.sign\_to\_thres +---------------------------------------------- + +.. 
automodule:: finn.transformation.streamline.sign_to_thres + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/finn/source_code/finn.util.rst b/docs/finn/source_code/finn.util.rst new file mode 100644 index 0000000000000000000000000000000000000000..5a86918195cd41dfc83e7faeb7923c5703f4fe42 --- /dev/null +++ b/docs/finn/source_code/finn.util.rst @@ -0,0 +1,46 @@ +**** +Util +**** + +Utility Modules +=============== + +finn.util.basic +--------------- + +.. automodule:: finn.util.basic + :members: + :undoc-members: + :show-inheritance: + +finn.util.data\_packing +----------------------- + +.. automodule:: finn.util.data_packing + :members: + :undoc-members: + :show-inheritance: + +finn.util.fpgadataflow +---------------------- + +.. automodule:: finn.util.fpgadataflow + :members: + :undoc-members: + :show-inheritance: + +finn.util.onnx +-------------- + +.. automodule:: finn.util.onnx + :members: + :undoc-members: + :show-inheritance: + +finn.util.test +-------------- + +.. automodule:: finn.util.test + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/finn/source_code/modules.rst b/docs/finn/source_code/modules.rst new file mode 100644 index 0000000000000000000000000000000000000000..dada6936c0d4e144acb991f32731b68e6995c23c --- /dev/null +++ b/docs/finn/source_code/modules.rst @@ -0,0 +1,7 @@ +finn +==== + +.. toctree:: + :maxdepth: 4 + + finn diff --git a/docs/finn/tutorials.rst b/docs/finn/tutorials.rst new file mode 100644 index 0000000000000000000000000000000000000000..2a234b15e61031ca994770c3590c013c4aaf5237 --- /dev/null +++ b/docs/finn/tutorials.rst @@ -0,0 +1,61 @@ +.. _tutorials: + +********* +Tutorials +********* + +.. note:: **This website is currently under construction.** + +FINN provides several Jupyter notebooks that can help to get familiar with the basics, the internals and the end-to-end flow in FINN. 
All Jupyter notebooks can be found in the repo in the `notebook folder <https://github.com/Xilinx/finn/tree/dev/notebooks>`_. + +Basics +====== + +The notebooks in this folder should give a basic insight into FINN, how to get started and the basic concepts. + +* `0_getting_started <https://github.com/Xilinx/finn/blob/master/notebooks/basics/0_getting_started.ipynb>`_ + + * This notebook corresponds to the chapter :ref:`getting_started` and gives an overview how to start working with FINN. + +* `1_how_to_work_with_onnx <https://github.com/Xilinx/finn/blob/master/notebooks/basics/1_how_to_work_with_onnx.ipynb>`_ + + * This notebook can help you to learn how to create and manipulate a simple ONNX model, also by using FINN + +* `2_modelwrapper <https://github.com/Xilinx/finn/blob/master/notebooks/basics/2_modelwrapper.ipynb>`_ + + * This notebook corresponds to the section :ref:`modelwrapper` in the chapter about internals. + +* `3_brevitas_network_import <https://github.com/Xilinx/finn/blob/master/notebooks/basics/3_brevitas_network_import.ipynb>`_ + + * This notebook shows how to import a brevitas network and prepare it for the FINN flow. + +Internals +========= + +The notebooks in this folder are more developer oriented. They should help you to get familiar with the principles in FINN and how to add new content regarding these concepts. + +* `0_custom_analysis_pass <https://github.com/Xilinx/finn/blob/master/notebooks/internals/0_custom_analysis_pass.ipynb>`_ + + * This notebook explains what an analysis pass is and how to write one for FINN. + +* `1_custom_transformation_pass <https://github.com/Xilinx/finn/blob/master/notebooks/internals/1_custom_transformation_pass.ipynb>`_ + + * This notebook explains what a transformation pass is and how to write one for FINN. + +* `2_custom_op <https://github.com/Xilinx/finn/blob/master/notebooks/internals/2_custom_op.ipynb>`_ + + * This notebooks explains what a custom operation/node is and how to create one for FINN. 
+ +* `3_verify_hls_custom_op <https://github.com/Xilinx/finn/blob/master/notebooks/internals/3_verify_hls_custom_op.ipynb>`_ + + * This notebook shows the functional verification flow for hls custom operations/nodes. + +End-to-End Flow +=============== + +This notebook shows the FINN end-to-end flow step by step using an example of a simple, binarized, fully-connected network trained on the MNIST data set. Starting with the brevitas export and taking this particular network all the way down to hardware by using a specific sequence of transformations. + +* `tfc_end2end_example <https://github.com/Xilinx/finn/blob/master/notebooks/end2end_example/tfc_end2end_example.ipynb>`_ + * This notebook takes the model step-by-step from a trained Brevitas net to a running FPGA bitfile. +* `tfc_end2end_verification <https://github.com/Xilinx/finn/blob/master/notebooks/end2end_example/tfc_end2end_verification.ipynb>`_ + * This notebook runs parellel to the tfc_end2end_example notebook above, and shows how the output of each step can be verified. diff --git a/docs/finn/verification.rst b/docs/finn/verification.rst new file mode 100644 index 0000000000000000000000000000000000000000..13e6fae7a22caf8fbab3d2569434e3cdc30d90e0 --- /dev/null +++ b/docs/finn/verification.rst @@ -0,0 +1,31 @@ +.. _verification: + +*********************** +Functional Verification +*********************** + +.. note:: **This website is currently under construction.** + +.. image:: /img/verification.png + :scale: 70% + :align: center + +"* This part of the flow is covered by the `this notebook <https://github.com/Xilinx/finn/tree/master/notebooks/end2end_example/tfc_end2end_verification.ipynb>`_. *" + +When the network is transformed it is important to verify the functionality to make sure the transformation did not change the behaviour of the model. There are multiple ways of verification that can be applied in different stages of the network inside FINN. 
All can be accessed using the execution function in module :py:mod:`finn.core.onnx_exec`. The execution happens in most cases node by node, which supports networks that have a mixture of standard ONNX nodes, custom nodes and HLS custom nodes. A single node can be executed using one or more of the following methods: + +Simulation using Python +======================= + +This simulation can be used right after the :ref:`brevitas_export` or when the network does not contain any HLS custom nodes, so right after the streamlining transformations and before the nodes are converted into HLS layers. + +Simulation using C++ +==================== + +This simulation can be used for a model containing several HLS custom operations. Because they are based on finn-hlslib function, C++ code can be generated from this single nodes and they can be executed by compiling the code and running the resulting executables. + + +Emulation using PyVerilator +=========================== + +The emulation using PyVerilator can be used when IP blocks were generated, either node by node or of a whole design. For that purpose PyVerilator gets the generated verilog files. diff --git a/docs/finn/vivado_synth.rst b/docs/finn/vivado_synth.rst new file mode 100644 index 0000000000000000000000000000000000000000..26a1f7a85284f2a438efcc05f593533da4cc8641 --- /dev/null +++ b/docs/finn/vivado_synth.rst @@ -0,0 +1,15 @@ +.. _vivado_synth: + +************************* +Vivado HLS and Vivado IPI +************************* + +.. note:: **This website is currently under construction.** + +.. image:: /img/vivado-synth.png + :scale: 70% + :align: center + +In this step the system is handed over to Vivado. To do this, IP blocks are created from each layer using Vivado HLS and then stitched together using Vivado IP Integrator. This creates a Vivado design of the entire network. 
The design can be verified using `PyVerilator <https://github.com/maltanar/pyverilator>`_ either on the network with the unstitched IP blocks or on the stitched IP. The generated verilog files are passed to PyVerilator and in this way the model can be emulated. This procedure is called *rtlsim* in FINN flow and details can be found in the chapter :ref:`verification`. + +Once the model is in the form of a stitched IP, it can be passed to the next flow step :ref:`pynq_deploy`. diff --git a/notebooks/FINN-CodeGenerationAndCompilation.ipynb b/notebooks/FINN-CodeGenerationAndCompilation.ipynb deleted file mode 100644 index df28989756bde4e1984bb18f94528ef935854f3c..0000000000000000000000000000000000000000 --- a/notebooks/FINN-CodeGenerationAndCompilation.ipynb +++ /dev/null @@ -1,953 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# FINN - Code Generation and Compilation\n", - "-----------------------------------------------------------------\n", - "<font size=\"3\">This notebook is about code generation and compilation to enable execution of FINN custom operation nodes. 
\n", - "\n", - "Following showSrc function is used to print the source code of function calls in the Jupyter notebook:</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import inspect\n", - "\n", - "def showSrc(what):\n", - " print(\"\".join(inspect.getsourcelines(what)[0]))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Outline\n", - "-------------\n", - "* <font size=\"3\">Example model</font>\n", - "* <font size=\"3\">Code generation</font>\n", - "* <font size=\"3\">Compilation</font>\n", - "* <font size=\"3\">CustomOp node execution</font>\n", - "* <font size=\"3\">Conclusion</font>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Example model\n", - "<font size=\"3\">To show the code generation and compilation of a node, an example model with a StreamingFCLayer_Batch node is first created. To learn more about FINN custom operation nodes, please take a look at notebook [FINN-CustomOps](FINN-CustomOps.ipynb).\n", - "\n", - "First TensorProto and helper are imported from ONNX. These can be used to create tensors, nodes, graphs and models in ONNX. Additional functions from `util` and the classes `DataType` and `ModelWrapper` are needed. More information about `DataType` and `ModelWrapper` can be found in Jupyter notebook [FINN-ModelWrapper](FINN-ModelWrapper.ipynb).</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from onnx import TensorProto, helper\n", - "import finn.core.utils as util\n", - "from finn.core.datatype import DataType\n", - "from finn.core.modelwrapper import ModelWrapper" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">Then all parameters, that are needed to create a StreamingFCLayer_Batch node, are set. To keep the example clear small values are chosen. 
For more information about the parameters please take a look at the documentation of the [finn-hls library](https://finn-hlslib.readthedocs.io/en/latest/).</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "idt = wdt = odt = DataType.BIPOLAR\n", - "mw = 8\n", - "mh = 8\n", - "pe = 4\n", - "simd = 4\n", - "nf = mh // pe\n", - "sf = mw // simd\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">A `tensor_value_info` is created for all tensors involved. In this case there is one tensor for the weights besides the input and output tensors. Then an input list is created containing the two inputs (`\"inp\"`and `\"weights\"`).\n", - "\n", - "**Note**: A StreamingFCLayer_Batch node can also have an output activation which is passed in the form of thresholds as input tensor</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "inp = helper.make_tensor_value_info(\"inp\", TensorProto.FLOAT, [1, sf, simd])\n", - "weights = helper.make_tensor_value_info(\"weights\", TensorProto.FLOAT, [mw, mh])\n", - "outp = helper.make_tensor_value_info(\"outp\", TensorProto.FLOAT, [1, nf, pe])\n", - "node_inp_list = [\"inp\", \"weights\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">Now the node can be created. The operation type is set to `\"StreamingFCLayer_Batch\"` and the rest of the attributes are set appropriately. 
The relevant attributes for the activation of the code generation and compilation are:</font>\n", - "* <font size=\"3\">**`domain=\"finn\"`**: specifies that the created node is a FINN-Custom Op</font>\n", - "* <font size=\"3\">**`backend=\"fpgadataflow\"`**: specifies that it is a node that corresponds to a function in the finn-hls library</font>\n", - "* <font size=\"3\">**`code_gen_dir\"`**: specifies the path to the directory where the generated c++ files are (is set during code generation)</font>\n", - "* <font size=\"3\">**`executable_path\"`**: specifies the path to the executable created after compilation (is set during compilation)</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "FCLayer_node = helper.make_node(\n", - " \"StreamingFCLayer_Batch\",\n", - " node_inp_list,\n", - " [\"outp\"],\n", - " domain=\"finn\",\n", - " backend=\"fpgadataflow\",\n", - " code_gen_dir=\"\",\n", - " executable_path=\"\",\n", - " resType=\"ap_resource_lut()\",\n", - " MW=mw,\n", - " MH=mh,\n", - " SIMD=simd,\n", - " PE=pe,\n", - " noActivation=1,\n", - " binaryXnorMode=1,\n", - " inputDataType=idt.name,\n", - " weightDataType=wdt.name,\n", - " outputDataType=odt.name,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\"> The node is packed into a graph environment and the inputs and outputs are set.</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "graph = helper.make_graph(\n", - " nodes=[FCLayer_node], name=\"fclayer_graph\", inputs=[inp], outputs=[outp]\n", - " )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">A model is now created from the graph, which is then converted into a ModelWrapper object for further processing in FINN. 
Afterwards the ModelWrapper internal functions can be used to set the FINN data types and the initializer for the weights. Since this is an example, the weights are not taken from the training, but random values are generated using the utility function `gen_finn_dt_tensor()`. This function gets a FINN datatype and a shape and generates a tensor with values of this datatype in the desired shape.</font>\n" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "model = helper.make_model(graph, producer_name=\"fclayer-model\")\n", - "model = ModelWrapper(model)\n", - "\n", - "model.set_tensor_datatype(\"inp\", idt)\n", - "model.set_tensor_datatype(\"outp\", odt)\n", - "model.set_tensor_datatype(\"weights\", wdt)\n", - "W = util.gen_finn_dt_tensor(wdt, (mw, mh))\n", - "model.set_initializer(\"weights\", W)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">The model is saved and then netron is used to visualize the resulting model. 
</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "model.save(\"FCLayer_graph.onnx\")" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Serving 'FCLayer_graph.onnx' at http://0.0.0.0:8081\n" - ] - } - ], - "source": [ - "import netron\n", - "netron.start('FCLayer_graph.onnx', port=8081, host=\"0.0.0.0\")" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>\n" - ], - "text/plain": [ - "<IPython.core.display.HTML object>" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "%%html\n", - "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Code Generation\n", - "<font size=\"3\">Code generation is a transformation pass that can be applied to the model. 
For more information about transformation passes, see Jupyter Notebook [FINN-HowToTransformPass](FINN-HowToTransformPass.ipynb).\n", - "\n", - "The code generation transformation is shown below.</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "class CodeGen(Transformation):\n", - " \"\"\"Code generation for all nodes in model\"\"\"\n", - "\n", - " def apply(self, model):\n", - " for node in model.graph.node:\n", - " if node.domain == \"finn\":\n", - " backend_attribute = get_by_name(node.attribute, \"backend\")\n", - " if backend_attribute is None:\n", - " continue\n", - " backend_value = backend_attribute.s.decode(\"UTF-8\")\n", - " if backend_value == \"fpgadataflow\":\n", - " _codegen_single_node(node, model)\n", - " return (model, False)\n", - "\n" - ] - } - ], - "source": [ - "from finn.transformation.fpgadataflow.codegen import CodeGen\n", - "showSrc(CodeGen)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">The transformation pass iterates over all nodes in the model and if `domain=\"finn\"` and `backend=\"fpgadataflow\"` is True, the function `_codegen_single_node()` is executed which is also part of the transformation pass and is shown below. 
</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "def _codegen_single_node(node, model):\n", - " \"\"\"Call custom implementation to generate code for single custom node\n", - " and create folder that contains all the generated files\"\"\"\n", - " op_type = node.op_type\n", - " try:\n", - " # lookup op_type in registry of CustomOps\n", - " inst = registry.custom_op[op_type](node)\n", - " # get the path of the code generation directory\n", - " code_gen_dir = inst.get_nodeattr(\"code_gen_dir\")\n", - " # ensure that there is a directory\n", - " if code_gen_dir == \"\" or not os.path.isdir(code_gen_dir):\n", - " code_gen_dir = tmp.mkdtemp(prefix=\"code_gen_\" + str(node.op_type) + \"_\")\n", - " inst.set_nodeattr(\"code_gen_dir\", code_gen_dir)\n", - " # ensure that there is generated code inside the dir\n", - " inst.code_generation(model)\n", - " except KeyError:\n", - " # exception if op_type is not supported\n", - " raise Exception(\"Custom op_type %s is currently not supported.\" % op_type)\n", - "\n" - ] - } - ], - "source": [ - "from finn.transformation.fpgadataflow.codegen import _codegen_single_node\n", - "showSrc(_codegen_single_node)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">An instance of the node is created and checked for the attribute `code_gen_dir`. If the attribute is not set, a temporary directory is created and the attribute is set accordingly. \n", - "\n", - "Then the `code_generation()` function of the instance is called. If an error occurs during this process, this is probably due to the fact that the selected CustomOp is not yet supported. 
The following description of the code generation within the CustomOp instance may lead to overlaps with the Jupyter notebook [FINN-CustomOps](FINN-CustomOps.ipynb).\n", - "\n", - "In order to clarify the individual components involved in code generation, an instance of the node is first created, as in the `_codegen_single_node` function. This is done by looking up the op_type in the [registry](https://github.com/Xilinx/finn/blob/dev/src/finn/custom_op/registry.py) of CustomOps. The instance contains a template for code generation which is shown below.</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - " #include \"cnpy.h\"\n", - " #include \"npy2apintstream.hpp\"\n", - " #include <vector>\n", - " #include \"bnn-library.h\"\n", - "\n", - " // includes for network parameters\n", - " $GLOBALS$\n", - "\n", - " // defines for network parameters\n", - " $DEFINES$\n", - "\n", - " int main(){\n", - "\n", - " $STREAMDECLARATIONS$\n", - "\n", - " $READNPYDATA$\n", - "\n", - " $DOCOMPUTE$\n", - "\n", - " $DATAOUTSTREAM$\n", - "\n", - " $SAVEASCNPY$\n", - "\n", - " }\n", - "\n", - " \n" - ] - } - ], - "source": [ - "import finn.custom_op.registry as registry\n", - "node = FCLayer_node\n", - "op_type = FCLayer_node.op_type\n", - "inst = registry.custom_op[op_type](node)\n", - "print(inst.docompute_template)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">The template has some general constructs, like the inclusion of bnn-library.h, which contains the references to the finn-hls library, and of cnpy.h and npy2apintstream.hpp, which support the transfer of python numpy arrays in c++. The idea of this template is to replace the variables marked with `$ $` with c++ calls during code generation. Then the template can be written into a .cpp file and be compiled. 
\n", - "\n", - "The sub-functions that are called during code generation are shown below.</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " def code_generation(self, model):\n", - " node = self.onnx_node\n", - " self.generate_params(model)\n", - " self.global_includes()\n", - " self.defines()\n", - " self.read_npy_data()\n", - " self.strm_decl()\n", - " self.docompute()\n", - " self.dataoutstrm()\n", - " self.save_as_npy()\n", - "\n", - " template = self.docompute_template\n", - "\n", - " for key in self.code_gen_dict:\n", - " # transform list into long string separated by '\\n'\n", - " code_gen_line = \"\\n\".join(self.code_gen_dict[key])\n", - " template = template.replace(key, code_gen_line)\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", - " f = open(os.path.join(code_gen_dir, \"execute_{}.cpp\".format(node.op_type)), \"w\")\n", - " f.write(template)\n", - " f.close()\n", - "\n" - ] - } - ], - "source": [ - "from finn.custom_op.fpgadataflow.streamingfclayer_batch import StreamingFCLayer_Batch\n", - "showSrc(StreamingFCLayer_Batch.code_generation)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">Except for the function `generate_params(model)` all functions needed to fill the template correspond to the `$ $` variable names, i.e. function `defines()` returns the part of the c++ code that replaces `$DEFINES$` in the template. The individual functions are member functions of the class HLSCustomOp and are defined in each CustomOp. 
The code for a StreamingFCLayer_Batch node can be looked up in the [code](https://github.com/Xilinx/finn/blob/dev/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py).</font> " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">A special function for code generation for the StreamingFCLayer_Batch node is the `generate_params(model)` function. Besides the normal input tensor, an fc layer has weight values as input and can get additional thresholds for activation. This function reads the values for the weights and thresholds via the `get_initializer` function of the ModelWrapper and writes them c++ conform in .h files, which are added to the includes. \n", - "\n", - "The `generate_params` function of the StreamingFCLayer_Batch is shown below.</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " def generate_params(self, model):\n", - " # weights\n", - " weights = model.get_initializer(self.onnx_node.input[1])\n", - " # convert weights into hlslib-compatible format\n", - " weight_tensor = self.get_hls_compatible_weight_tensor(weights)\n", - " export_wdt = self.get_weight_datatype()\n", - " # we have converted bipolar weights to binary for export,\n", - " # so use it as such for weight generation\n", - " if self.get_weight_datatype() == DataType.BIPOLAR:\n", - " export_wdt = DataType.BINARY\n", - " weight_hls_code = numpy_to_hls_code(\n", - " weight_tensor, export_wdt, \"weights\", True, True\n", - " )\n", - " # write weights into params.h\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", - " f_weights = open(\"{}/params.h\".format(code_gen_dir), \"w\")\n", - "\n", - " if export_wdt.bitwidth() != 1:\n", - " f_weights.write(\n", - " \"static FixedPointWeights<{},{},{},{}> weights = \".format(\n", - " self.get_nodeattr(\"SIMD\"),\n", - " export_wdt.get_hls_datatype_str(),\n", - " 
self.get_nodeattr(\"PE\"),\n", - " self.calc_wmem(),\n", - " )\n", - " )\n", - " else:\n", - " f_weights.write(\n", - " \"static BinaryWeights<{},{},{}> weights = \".format(\n", - " self.get_nodeattr(\"SIMD\"), self.get_nodeattr(\"PE\"), self.calc_wmem()\n", - " )\n", - " )\n", - " f_weights.write(weight_hls_code)\n", - " f_weights.close()\n", - " # thresholds\n", - " if len(self.onnx_node.input) > 2:\n", - " thresholds = model.get_initializer(self.onnx_node.input[2])\n", - " if thresholds is not None:\n", - " threshold_tensor = self.get_hls_compatible_threshold_tensor(thresholds)\n", - " tdt = DataType.INT32\n", - " # use UINT32 threshold export for bipolar times bipolar\n", - " inp_is_bipolar = self.get_input_datatype() == DataType.BIPOLAR\n", - " wt_is_bipolar = self.get_weight_datatype() == DataType.BIPOLAR\n", - " # reinterpret inp/wt as bipolar if bin_xnor_mode is iset\n", - " inp_is_binary = self.get_input_datatype() == DataType.BINARY\n", - " wt_is_binary = self.get_weight_datatype() == DataType.BINARY\n", - " bin_xnor_mode = self.get_nodeattr(\"binaryXnorMode\") == 1\n", - " inp_is_bipolar = inp_is_bipolar or (inp_is_binary and bin_xnor_mode)\n", - " wt_is_bipolar = wt_is_bipolar or (wt_is_binary and bin_xnor_mode)\n", - " if inp_is_bipolar and wt_is_bipolar:\n", - " tdt = DataType.UINT32\n", - " thresholds_hls_code = numpy_to_hls_code(\n", - " threshold_tensor, tdt, \"thresholds\", False, True\n", - " )\n", - " # write thresholds into thresh.h\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", - " f_thresh = open(\"{}/thresh.h\".format(code_gen_dir), \"w\")\n", - " tdt_hls = tdt.get_hls_datatype_str()\n", - " # use binary to export bipolar activations\n", - " export_odt = self.get_output_datatype()\n", - " if self.get_output_datatype() == DataType.BIPOLAR:\n", - " export_odt = DataType.BINARY\n", - " odt_hls = export_odt.get_hls_datatype_str()\n", - " f_thresh.write(\n", - " \"static ThresholdsActivation<{},{},{},{},{},{},{}> threshs \\\n", - 
" = \".format(\n", - " self.calc_tmem(),\n", - " self.get_nodeattr(\"PE\"),\n", - " threshold_tensor.shape[-1],\n", - " tdt_hls,\n", - " odt_hls,\n", - " self.get_nodeattr(\"ActVal\"),\n", - " \"std::less_equal<%s>\" % tdt_hls,\n", - " )\n", - " )\n", - " f_thresh.write(thresholds_hls_code)\n", - " f_thresh.close()\n", - "\n" - ] - } - ], - "source": [ - "showSrc(StreamingFCLayer_Batch.generate_params)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">The generated code is written to the previously created temporary directory and the node attribute `code_gen_dir` is set. This completes the code generation for executing a single CustomOp. The next step is compilation. </font>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Compilation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">The compilation is a transformation pass like the code generation. The code of this transformation is shown below. 
</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "class Compile(Transformation):\n", - " \"\"\"Compile for all nodes in model\"\"\"\n", - "\n", - " def __init__(self):\n", - " super().__init__()\n", - "\n", - " def apply(self, model):\n", - " for node in model.graph.node:\n", - " op_type = node.op_type\n", - " if node.domain == \"finn\":\n", - " backend_attribute = util.get_by_name(node.attribute, \"backend\")\n", - " if backend_attribute is None:\n", - " continue\n", - " backend_value = backend_attribute.s.decode(\"UTF-8\")\n", - " if backend_value == \"fpgadataflow\":\n", - " try:\n", - " # lookup op_type in registry of CustomOps\n", - " inst = registry.custom_op[op_type](node)\n", - " # ensure that code is generated\n", - " assert inst.get_nodeattr(\"code_gen_dir\") != \"\"\n", - " # call the compilation function for this node\n", - " inst.compile_singlenode_code()\n", - " # ensure that executable path is now set\n", - " assert inst.get_nodeattr(\"executable_path\") != \"\"\n", - " except KeyError:\n", - " # exception if op_type is not supported\n", - " raise Exception(\n", - " \"Custom op_type %s is currently not supported.\" % op_type\n", - " )\n", - " return (model, False)\n", - "\n" - ] - } - ], - "source": [ - "from finn.transformation.fpgadataflow.compile import Compile\n", - "showSrc(Compile)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">The scheme resembles that of the code generation transformation pass. The pass iterates over all nodes in the model and if `domain=\"finn\"` and `backend=\"fpgadataflow\"` is True, the compilation is activated for that node. First an instance of the node is created and checked whether the code was generated. For this the node attribute `code_gen_dir` is checked. If it exists, the function `compile_singlenode_code()` can be executed. 
Then it is checked whether the path to the executable has been set. There is an exception if the custom op_type is not supported. \n", - "\n", - "The actual compilation is done with the function `compile_singlenode_code()`. What happens inside the function is shown below.</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " def compile_singlenode_code(self):\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", - " builder = CppBuilder()\n", - " builder.append_includes(\"-I/workspace/finn/src/finn/data/cpp\")\n", - " builder.append_includes(\"-I/workspace/cnpy/\")\n", - " builder.append_includes(\"-I/workspace/finn-hlslib\")\n", - " builder.append_includes(\"-I/workspace/vivado-hlslib\")\n", - " builder.append_includes(\"--std=c++11\")\n", - " builder.append_sources(code_gen_dir + \"/*.cpp\")\n", - " builder.append_sources(\"/workspace/cnpy/cnpy.cpp\")\n", - " builder.append_includes(\"-lz\")\n", - " builder.set_executable_path(code_gen_dir + \"/node_model\")\n", - " builder.build(code_gen_dir)\n", - " self.set_nodeattr(\"executable_path\", builder.executable_path)\n", - "\n" - ] - } - ], - "source": [ - "showSrc(StreamingFCLayer_Batch.compile_singlenode_code)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">To execute the compilation the class `CppBuilder` from `core.utils` is used. Subsequently the member functions of this class are used to construct the g++ command. To better understand the exact procedure the class `CppBuilder` is shown below. 
</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "class CppBuilder:\n", - " def __init__(self):\n", - " self.include_paths = []\n", - " self.cpp_files = []\n", - " self.executable_path = \"\"\n", - " self.code_gen_dir = \"\"\n", - " self.compile_components = []\n", - " self.compile_script = \"\"\n", - "\n", - " def append_includes(self, library_path):\n", - " self.include_paths.append(library_path)\n", - "\n", - " def append_sources(self, cpp_file):\n", - " self.cpp_files.append(cpp_file)\n", - "\n", - " def set_executable_path(self, path):\n", - " self.executable_path = path\n", - "\n", - " def build(self, code_gen_dir):\n", - " # raise error if includes are empty\n", - " self.code_gen_dir = code_gen_dir\n", - " self.compile_components.append(\"g++ -o \" + str(self.executable_path))\n", - " for cpp_file in self.cpp_files:\n", - " self.compile_components.append(cpp_file)\n", - " for lib in self.include_paths:\n", - " self.compile_components.append(lib)\n", - " bash_compile = \"\"\n", - " for component in self.compile_components:\n", - " bash_compile += str(component) + \" \"\n", - " self.compile_script = str(self.code_gen_dir) + \"/compile.sh\"\n", - " with open(self.compile_script, \"w\") as f:\n", - " f.write(\"#!/bin/bash \\n\")\n", - " f.write(bash_compile + \"\\n\")\n", - " bash_command = [\"bash\", self.compile_script]\n", - " process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)\n", - " process_compile.communicate()\n", - "\n" - ] - } - ], - "source": [ - "showSrc(util.CppBuilder)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">The class contains several member variables needed to execute the compilation command. These are reset when instantiating the class. 
The following functions are to fill these variables and in the build function, everything is combined into one compile command, which is then executed using the python library `subprocess`. \n", - " \n", - "After the executables have been created, the `compile_singlenode_code` function sets the `executable_path` node attribute.</font>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">This flow is needed for the execution of a single CustomOp node. The execution itself is represented in function `execute_node` of the respective node class. The last part of this Jupyter notebook is about this function.</font>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### CustomOp node execution" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">The function `execute_node` of StreamingFCLayer_Batch is displayed below. The class HLSCustomOp also has an `execute_node` function, which contains the basic principle of the execution. However, for the StreamingFcLayer_Batch node further transformations are necessary. 
</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " def execute_node(self, context, graph):\n", - " node = self.onnx_node\n", - " mw = self.get_nodeattr(\"MW\")\n", - " mh = self.get_nodeattr(\"MH\")\n", - " simd = self.get_nodeattr(\"SIMD\")\n", - " pe = self.get_nodeattr(\"PE\")\n", - " sf = mw // simd\n", - " nf = mh // pe\n", - "\n", - " # TODO ensure codegen dir exists\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", - " # create a npy file fore each input of the node (in_ind is input index)\n", - " in_ind = 0\n", - " for inputs in node.input:\n", - " # it is assumed that the first input of the node is the data input\n", - " # the second input are the weights\n", - " # the third input are the thresholds\n", - " if in_ind == 0:\n", - " assert str(context[inputs].dtype) == \"float32\"\n", - " expected_inp_shape = (1, sf, simd)\n", - " reshaped_input = context[inputs].reshape(expected_inp_shape)\n", - " # flip SIMD (innermost) dimension of input tensor, there's some reversal\n", - " # going on somewhere with a mistmatch between npy and hls...\n", - " reshaped_input = np.flip(reshaped_input, -1)\n", - " if self.get_input_datatype() == DataType.BIPOLAR:\n", - " # store bipolar activations as binary\n", - " reshaped_input = (reshaped_input + 1) / 2\n", - " np.save(\n", - " os.path.join(code_gen_dir, \"input_{}.npy\".format(in_ind)),\n", - " reshaped_input,\n", - " )\n", - " elif in_ind > 2:\n", - " raise Exception(\"Unexpected input found for StreamingFCLayer\")\n", - " in_ind += 1\n", - " # execute the precompiled model\n", - " super().exec_precompiled_singlenode_model()\n", - " # load output npy file\n", - " super().npy_to_dynamic_output(context)\n", - " # reinterpret binary output as bipolar where needed\n", - " if self.get_output_datatype() == DataType.BIPOLAR:\n", - " out = context[node.output[0]]\n", - " out = 2 * out - 1\n", - " 
context[node.output[0]] = out\n", - " assert context[node.output[0]].shape == (1, nf, pe)\n", - " # reshape output to have expected shape\n", - " context[node.output[0]] = context[node.output[0]].reshape(1, mh)\n", - "\n" - ] - } - ], - "source": [ - "showSrc(StreamingFCLayer_Batch.execute_node)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">First, all parameters are extracted using the `get_nodeattr` function. It is also important to read the code generation via `code_gen_dir`. `execute_node` is divided into three parts:</font>\n", - "* <font size=\"3\">creation of a npy file for each input of the node</font>\n", - "* <font size=\"3\">execution of the precompiled model</font>\n", - "* <font size=\"3\">loading the output npy file</font>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Creation of a npy file for each input of the node\n", - "\n", - "<font size=\"3\">To transfer the input values correctly to the c++ model, the input tensor has to be reshaped and the innermost dimension (SIMD) has to be flipped. Afterwards the tensor can be stored in a .npy file. \n", - "\n", - "Since the StreamingFcLayer_Batch node only has a maximum of three inputs (input, weights, thresholds), an error will be thrown if this number is exceeded. The weights and thresholds have already been written to separate .h files and therefore only the input tensor has to be stored in a .npy file. </font>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Execution of the precompiled model\n", - "<font size=\"3\">The function from class HLSCustomOp is used here. 
It is shown below.</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " def exec_precompiled_singlenode_model(self):\n", - " # execute precompiled executable\n", - " executable_path = self.get_nodeattr(\"executable_path\")\n", - " if executable_path == \"\":\n", - " raise Exception(\n", - " \"\"\"\n", - "Found no executable for this node, did you run the codegen and\n", - "compilation transformations?\n", - " \"\"\"\n", - " )\n", - " process_execute = subprocess.Popen(executable_path, stdout=subprocess.PIPE)\n", - " process_execute.communicate()\n", - "\n" - ] - } - ], - "source": [ - "showSrc(StreamingFCLayer_Batch.exec_precompiled_singlenode_model)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">After checking that the attribute `executable_path` is not empty, the executable is executed via `subprocess`. The output is written from the c++ code into a .npy file, which can be read later on. </font>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Loading the output npy file\n", - "\n", - "<font size=\"3\">To load the output data the function `npy_to_dynamic_output` is used. It is shown below. 
</font>" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " def npy_to_dynamic_output(self, context):\n", - " # TODO support multi-output nodes as needed\n", - " node = self.onnx_node\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", - " output = np.load(\"{}/output.npy\".format(code_gen_dir))\n", - " context[node.output[0]] = output\n", - "\n" - ] - } - ], - "source": [ - "showSrc(StreamingFCLayer_Batch.npy_to_dynamic_output)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "<font size=\"3\">Since the output file is stored in the same directory as the generated code, the attribute `code_gen_dir` is read first and then the output data is loaded using the numpy function `.load`. The context is set accordingly. \n", - "\n", - "Finally, the output data is manipulated in the `execute_node` function. If the data is bipolar, it is converted into binary data for further processing. Then the shape of the tensor is checked and converted into the expected output shape.\n", - "</font>" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Conclusion\n", - "\n", - "<font size=\"3\">Code generation and compilation are transformation passes that must be applied before a node can be executed. They are independent of the execution of a node and can be used for further development to enable functions such as code generation for synthesis or larger models. \n", - "\n", - "All files belonging to the code generation and compilation are stored in the directory which is specified in `code_gen_dir`.\n", - "\n", - "**Important**: If the code is executed inside the docker container, the directory will be deleted after closing the container. \n", - " \n", - "For further reading please see the /tests folder of the FINN repo. 
The subfolder /fpgadataflow contains for example: [test_fpgadataflow_fclayer](https://github.com/Xilinx/finn/blob/dev/tests/fpgadataflow/test_fpgadataflow_fclayer.py) which tests the functionality of the flow described above.</font>" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.8" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/finn-basics.ipynb b/notebooks/basics/0_getting_started.ipynb similarity index 85% rename from notebooks/finn-basics.ipynb rename to notebooks/basics/0_getting_started.ipynb index 57f8e5947dfb2ff8525cc0691cc6dd41e15c4bf7..07b2a2ba6d5a21be15de5c4061500d83b2aefdf3 100644 --- a/notebooks/finn-basics.ipynb +++ b/notebooks/basics/0_getting_started.ipynb @@ -72,13 +72,13 @@ "\n", "FINN uses ONNX is a specific way that we refer to as FINN-ONNX, and not all ONNX graphs are supported by FINN (and vice versa). Here is a list of key points to keep in mind:\n", "\n", - "* *Custom quantization annotations but data stored as float.* ONNX does not support datatypes smaller than 8-bit integers, whereas in FINN we are interested in smaller integers down to ternary and bipolar. To make this work, FINN uses the `quantization_annotation` field in ONNX to annotate tensors with their [FINN DataType](https://github.com/Xilinx/finn/blob/dev/src/finn/core/datatype.py) information. However, all tensors are expected to use single-precision floating point (float32) storage in FINN. This means we store even a 1-bit value as floating point for the purposes of representation. 
The FINN compiler flow is responsible for eventually producing a packed representation for the target hardware, where the 1-bit is actually stored as 1-bit.\n", + "* *Custom quantization annotations but data stored as float.* ONNX does not support datatypes smaller than 8-bit integers, whereas in FINN we are interested in smaller integers down to ternary and bipolar. To make this work, FINN uses the `quantization_annotation` field in ONNX to annotate tensors with their [FINN DataType](https://github.com/Xilinx/finn/blob/master/src/finn/core/datatype.py) information. However, all tensors are expected to use single-precision floating point (float32) storage in FINN. This means we store even a 1-bit value as floating point for the purposes of representation. The FINN compiler flow is responsible for eventually producing a packed representation for the target hardware, where the 1-bit is actually stored as 1-bit.\n", "\n", "* *Custom operations/nodes.* FINN uses many custom operations (`op_type` in ONNX NodeProto) that are not defined in the ONNX operator schema. These custom nodes are marked with `domain=\"finn\"` in the protobuf to identify them as such. These nodes can represent specific operations that we need for low-bit networks, or operations that are specific to a particular hardware backend.\n", "\n", - "* *Custom ONNX execution flow* To verify correct operation of FINN-ONNX graphs, FINN provides its own [ONNX execution flow](https://github.com/Xilinx/finn/blob/dev/src/finn/core/onnx_exec.py). This flow supports the standard set of ONNX operations as well as the custom FINN operations. *Important:* this execution flow is *only* meant for checking the correctness of models after applying transformations, and *not* for high performance inference. \n", + "* *Custom ONNX execution flow* To verify correct operation of FINN-ONNX graphs, FINN provides its own [ONNX execution flow](https://github.com/Xilinx/finn/blob/master/src/finn/core/onnx_exec.py). 
This flow supports the standard set of ONNX operations as well as the custom FINN operations. *Important:* this execution flow is *only* meant for checking the correctness of models after applying transformations, and *not* for high performance inference. \n", "\n", - "* *ModelWrapper* FINN provides a [`ModelWrapper`](https://github.com/Xilinx/finn/blob/dev/src/finn/core/modelwrapper.py) class as a thin wrapper around ONNX to make it easier to analyze and manipulate ONNX graphs. This wrapper provides many helper functions, while still giving full access to the ONNX protobuf representation. \n", + "* *ModelWrapper* FINN provides a [`ModelWrapper`](https://github.com/Xilinx/finn/blob/master/src/finn/core/modelwrapper.py) class as a thin wrapper around ONNX to make it easier to analyze and manipulate ONNX graphs. This wrapper provides many helper functions, while still giving full access to the ONNX protobuf representation. \n", "\n", "[Netron](https://lutzroeder.github.io/netron/) is very useful for visualizing ONNX models, including FINN-ONNX models." 
] @@ -89,9 +89,9 @@ "source": [ "## More FINN Resources\n", "\n", - "* **[List of publications](https://github.com/Xilinx/finn/blob/dev/docs/publications.md)**\n", + "* **[List of publications](https://github.com/Xilinx/finn/blob/master/docs/publications.md)**\n", "* **[Roadmap](https://github.com/Xilinx/finn/projects/1)**\n", - "* **[Status of example networks](https://github.com/Xilinx/finn/blob/dev/docs/example-networks.md)**\n", + "* **[Status of example networks](https://github.com/Xilinx/finn/blob/master/docs/example-networks.md)**\n", "\n", "\n", "\n" diff --git a/notebooks/basics/1_how_to_work_with_onnx.ipynb b/notebooks/basics/1_how_to_work_with_onnx.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..29b2751aff73706d5590c6641b86104368816922 --- /dev/null +++ b/notebooks/basics/1_how_to_work_with_onnx.ipynb @@ -0,0 +1,785 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# FINN - How to work with ONNX\n", + "\n", + "This notebook should give an overview of ONNX ProtoBuf, help to create and manipulate an ONNX model and use FINN functions to work with it. There may be overlaps to other notebooks, like [ModelWrapper](2_modelwrapper.ipynb) and [CustomOps](../internals/2_custom_op.ipynb), but this notebook will give an overview about the handling of ONNX models in FINN." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Outline\n", + "* #### How to create a simple model\n", + "* #### How to manipulate an ONNX model" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### How to create a simple model\n", + "\n", + "To explain how to create an ONNX model a simple example with mathematical operations is used. All nodes are from the [standard operations library of ONNX](https://github.com/onnx/onnx/blob/master/docs/Operators.md).\n", + "\n", + "First ONNX is imported, then the helper function can be used to make a node." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import onnx\n", + "\n", + "Add1_node = onnx.helper.make_node(\n", + " 'Add',\n", + " inputs=['in1', 'in2'],\n", + " outputs=['sum1'],\n", + " name='Add1'\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first attribute of the node is the operation type. In this case it is `'Add'`, so it is an adder node. Then the input names are passed to the node and at the end a name is assigned to the output.\n", + " \n", + "For this example we want two other adder nodes, one abs node and the output shall be rounded so one round node is needed." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "Add2_node = onnx.helper.make_node(\n", + " 'Add',\n", + " inputs=['sum1', 'in3'],\n", + " outputs=['sum2'],\n", + " name='Add2',\n", + ")\n", + "\n", + "Add3_node = onnx.helper.make_node(\n", + " 'Add',\n", + " inputs=['abs1', 'abs1'],\n", + " outputs=['sum3'],\n", + " name='Add3',\n", + ")\n", + "\n", + "Abs_node = onnx.helper.make_node(\n", + " 'Abs',\n", + " inputs=['sum2'],\n", + " outputs=['abs1'],\n", + " name='Abs'\n", + ")\n", + "\n", + "Round_node = onnx.helper.make_node(\n", + " 'Round',\n", + " inputs=['sum3'],\n", + " outputs=['out1'],\n", + " name='Round',\n", + ")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The names of the inputs and outputs of the nodes give already an idea of the structure of the resulting graph. In order to integrate the nodes into a graph environment, the inputs and outputs of the graph have to be specified first. In ONNX all data edges are processed as tensors. So with the helper function tensor value infos are created for the input and output tensors of the graph. Float from ONNX is used as data type. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "in1 = onnx.helper.make_tensor_value_info(\"in1\", onnx.TensorProto.FLOAT, [4, 4])\n", + "in2 = onnx.helper.make_tensor_value_info(\"in2\", onnx.TensorProto.FLOAT, [4, 4])\n", + "in3 = onnx.helper.make_tensor_value_info(\"in3\", onnx.TensorProto.FLOAT, [4, 4])\n", + "out1 = onnx.helper.make_tensor_value_info(\"out1\", onnx.TensorProto.FLOAT, [4, 4])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now the graph can be built. First all nodes are passed. Here it is to be noted that it requires a certain sequence. The nodes must be instantiated in their dependencies to each other. This means Add2 must not be listed before Add1, because Add2 depends on the result of Add1. A name is then assigned to the graph. This is followed by the inputs and outputs. \n", + "\n", + "`value_info` of the graph contains the remaining tensors within the graph. When creating the nodes we have already defined names for the inner data edges and now these are assigned tensors of the datatype float and a certain shape." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + " graph = onnx.helper.make_graph(\n", + " nodes=[\n", + " Add1_node,\n", + " Add2_node,\n", + " Abs_node,\n", + " Add3_node,\n", + " Round_node,\n", + " ],\n", + " name=\"simple_graph\",\n", + " inputs=[in1, in2, in3],\n", + " outputs=[out1],\n", + " value_info=[\n", + " onnx.helper.make_tensor_value_info(\"sum1\", onnx.TensorProto.FLOAT, [4, 4]),\n", + " onnx.helper.make_tensor_value_info(\"sum2\", onnx.TensorProto.FLOAT, [4, 4]),\n", + " onnx.helper.make_tensor_value_info(\"abs1\", onnx.TensorProto.FLOAT, [4, 4]),\n", + " onnx.helper.make_tensor_value_info(\"sum3\", onnx.TensorProto.FLOAT, [4, 4]),\n", + " ],\n", + " )\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Important**: In our example, the shape of the tensors does not change during the calculation. This is not always the case. So you have to make sure that you specify the shape correctly.\n", + "\n", + "Now a model can be created from the graph and saved using the `.save` function. The model is saved in .onnx format and can be reloaded with `onnx.load()`. This also means that you can easily share your own model in .onnx format with others." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "onnx_model = onnx.helper.make_model(graph, producer_name=\"simple-model\")\n", + "onnx.save(onnx_model, 'simple_model.onnx')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To visualize the created model, [netron](https://github.com/lutzroeder/netron) can be used. Netron is a visualizer for neural network, deep learning and machine learning models." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Serving 'simple_model.onnx' at http://0.0.0.0:8081\n" + ] + } + ], + "source": [ + "import netron\n", + "netron.start('simple_model.onnx', port=8081, host=\"0.0.0.0\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%%html\n", + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Netron also allows you to interactively explore the model. If you click on a node, the node attributes will be displayed. \n", + "\n", + "In order to test the resulting model, a function is first written in Python that calculates the expected output. Because numpy arrays are to be used, numpy is imported first." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "\n", + "def expected_output(in1, in2, in3):\n", + " sum1 = np.add(in1, in2)\n", + " sum2 = np.add(sum1, in3)\n", + " abs1 = np.absolute(sum2)\n", + " sum3 = np.add(abs1, abs1)\n", + " return np.round(sum3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then the values for the three inputs are calculated. Random numbers are used." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "in1_values =np.asarray(np.random.uniform(low=-5, high=5, size=(4,4)), dtype=np.float32)\n", + "in2_values = np.asarray(np.random.uniform(low=-5, high=5, size=(4,4)), dtype=np.float32)\n", + "in3_values = np.asarray(np.random.uniform(low=-5, high=5, size=(4,4)), dtype=np.float32)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can easily pass the values to the function we just wrote to calculate the expected result. For the created model the inputs must be summarized in a dictionary, which is then passed on to the model." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "input_dict = {}\n", + "input_dict[\"in1\"] = in1_values\n", + "input_dict[\"in2\"] = in2_values\n", + "input_dict[\"in3\"] = in3_values" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To run the model and calculate the output, [onnxruntime](https://github.com/microsoft/onnxruntime) can be used. ONNX Runtime is a performance-focused complete scoring engine for Open Neural Network Exchange (ONNX) models from Microsoft. The `.InferenceSession` function is used to create a session of the model and `.run` is used to execute the model." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "import onnxruntime as rt\n", + "\n", + "sess = rt.InferenceSession(onnx_model.SerializeToString())\n", + "output = sess.run(None, input_dict)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The input values are also transferred to the reference function. Now the output of the execution of the model can be compared with that of the reference." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The output of the ONNX model is: \n", + "[[12. 9. 14. 8.]\n", + " [ 9. 9. 4. 6.]\n", + " [ 3. 19. 9. 5.]\n", + " [ 8. 22. 7. 2.]]\n", + "\n", + "The output of the reference function is: \n", + "[[12. 9. 14. 8.]\n", + " [ 9. 9. 4. 6.]\n", + " [ 3. 19. 9. 5.]\n", + " [ 8. 22. 7. 2.]]\n", + "\n", + "The results are the same!\n" + ] + } + ], + "source": [ + "ref_output= expected_output(in1_values, in2_values, in3_values)\n", + "print(\"The output of the ONNX model is: \\n{}\".format(output[0]))\n", + "print(\"\\nThe output of the reference function is: \\n{}\".format(ref_output))\n", + "\n", + "if (output[0] == ref_output).all():\n", + " print(\"\\nThe results are the same!\")\n", + "else:\n", + " raise Exception(\"Something went wrong, the output of the model doesn't match the expected output!\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we have verified that the model works as we expected it to, we can continue working with the graph." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### How to manipulate an ONNX model\n", + "\n", + "In the model there are two successive adder nodes. An adder node in ONNX can only add two inputs, but there is also the [**sum**](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Sum) node, which can process more than one input. So it would be a reasonable change of the graph to combine the two successive adder nodes to one sum node." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the following we assume that we do not know the appearance of the model, so we first try to identify whether there are two consecutive adders in the graph and then convert them into a sum node. \n", + "\n", + "Here we make use of FINN. 
FINN provides a thin wrapper around the model which provides several additional helper functions to manipulate the graph. The code can be found [here](https://github.com/Xilinx/finn/blob/master/src/finn/core/modelwrapper.py) and you can find a more detailed description in the notebook [ModelWrapper](2_modelwrapper.ipynb)." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.core.modelwrapper import ModelWrapper\n", + "finn_model = ModelWrapper(onnx_model)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As explained in the previous section, it is important that the nodes are listed in the correct order. If a new node has to be inserted or an old node has to be replaced, it is important to do that in the appropriate position. The following function serves this purpose. It returns a dictionary, which contains the node name as key and the respective node index as value." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "def get_node_id(model):\n", + " node_index = {}\n", + " node_ind = 0\n", + " for node in model.graph.node:\n", + " node_index[node.name] = node_ind\n", + " node_ind += 1\n", + " return node_index" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The function scans the list of nodes and stores a run index (`node_ind`) as node index in the dictionary for every node name.\n", + "\n", + "Another helper function is being implemented that searches for adder nodes in the graph and returns the found nodes. This is needed to determine if and which adder nodes are in the given model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "def identify_adder_nodes(model):\n", + " add_nodes = []\n", + " for node in model.graph.node:\n", + " if node.op_type == \"Add\":\n", + " add_nodes.append(node)\n", + " return add_nodes" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The function iterates over all nodes of the model and if the operation type is `\"Add\"` the node will be stored in `add_nodes`. At the end `add_nodes` is returned.\n", + "\n", + "If we apply this to our model, three nodes should be returned." + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found adder node: Add1\n", + "Found adder node: Add2\n", + "Found adder node: Add3\n" + ] + } + ], + "source": [ + "add_nodes = identify_adder_nodes(finn_model)\n", + "for node in add_nodes:\n", + " print(\"Found adder node: {}\".format(node.name))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Among other helper functions, `ModelWrapper` offers two functions that can help to determine the preceding and succeeding node of a node. However, these functions are not getting a node as input, but can determine the consumer or producer of a tensor. We write two functions that uses these helper functions to determine the previous and the next node of a node." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "def find_predecessor(model, node):\n", + " predecessors = []\n", + " for i in range(len(node.input)):\n", + " producer = model.find_producer(node.input[i])\n", + " predecessors.append(producer)\n", + " return predecessors\n", + " \n", + "\n", + "def find_successor(model, node):\n", + " successors = []\n", + " for i in range(len(node.output)):\n", + " consumer = model.find_consumer(node.output[i])\n", + " successors.append(consumer)\n", + " return successors" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first function uses `find_producer` from `ModelWrapper` to create a list of the producers of the inputs of the given node. So the returned list is indirectly filled with the predecessors of the node. The second function works in a similar way, `find_consumer` from `ModelWrapper` is used to find the consumers of the output tensors of the node and so a list with the successors can be created. " + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "def adder_pair(model, node):\n", + " adder_pairs = []\n", + " node_pair = []\n", + " successor_list = find_successor(model, node)\n", + " \n", + " for successor in successor_list:\n", + " if successor.op_type == \"Add\":\n", + " node_pair.append(node)\n", + " node_pair.append(successor)\n", + " adder_pairs.append((node_pair))\n", + " node_pair = []\n", + " return adder_pairs\n", + " " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The function gets a node and the model as input. Two empty lists are created to be filled with a list of adder node pairs that can be returned as result of the function. Then the function `find_successor` is used to return all of the successors of the node. 
If one of the successors is an adder node, the node is saved in `node_pair` together with the successive adder node and put in the list `adder_pairs`. Then the temporary list is cleaned and can be filled with the next adder node pair. Since it is theoretically possible for an adder node to have more than one subsequent adder node, a list of lists is created. This list of the node with all its successive adder nodes is returned.\n", + "\n", + "So now we can find out which adder node has an adder node as successor. Since the model is known, one adder pair (Add1+Add2) should be found when applying the function to the previously determined adder node list (`add_nodes`)." + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found following pair that could be replaced by a sum node:\n", + "Add1\n", + "Add2\n" + ] + } + ], + "source": [ + "for node in add_nodes:\n", + " add_pairs = adder_pair(finn_model, node)\n", + " if len(add_pairs) != 0:\n", + " for i in range(len(add_pairs)):\n", + " substitute_pair = add_pairs[i]\n", + " print(\"Found following pair that could be replaced by a sum node:\")\n", + " for node_pair in add_pairs:\n", + " for node in node_pair:\n", + " print(node.name)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that the pair to be replaced has been identified (`substitute_pair`), a sum node can be instantiated and inserted into the graph at the correct position. \n", + "\n", + "First of all, the inputs must be determined. For this the adder nodes inputs are used minus the input, which corresponds to the output of the other adder node." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The new node gets the following inputs: \n", + "['in1', 'in2', 'in3']\n" + ] + } + ], + "source": [ + "input_list = []\n", + "for i in range(len(substitute_pair)):\n", + " if i == 0:\n", + " for j in range(len(substitute_pair[i].input)):\n", + " if substitute_pair[i].input[j] != substitute_pair[i+1].output[0]:\n", + " input_list.append(substitute_pair[i].input[j])\n", + " else:\n", + " for j in range(len(substitute_pair[i].input)):\n", + " if substitute_pair[i].input[j] != substitute_pair[i-1].output[0]:\n", + " input_list.append(substitute_pair[i].input[j])\n", + "print(\"The new node gets the following inputs: \\n{}\".format(input_list))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The output of the sum node matches the output of the second adder node and can therefore be taken over directly." + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "sum_output = substitute_pair[1].output[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The summary node can be created with this information." + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "Sum_node = onnx.helper.make_node(\n", + " 'Sum',\n", + " inputs=input_list,\n", + " outputs=[sum_output],\n", + " name=\"Sum\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The node can now be inserted into the graph and the old nodes are removed." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "node_ids = get_node_id(finn_model)\n", + "node_ind = node_ids[substitute_pair[0].name]\n", + "graph.node.insert(node_ind, Sum_node)\n", + "\n", + "for node in substitute_pair:\n", + " graph.node.remove(node)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To insert the node in the right place, the index of the first node of the substitute_pair is used as node index for the sum node and embedded into the graph using `.insert`. Then the two elements in `substitute_pair` are deleted using `.remove`. `.insert` and `.remove` are functions provided by ONNX." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The new graph is saved as ONNX model and can be visualized with Netron." + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "onnx_model1 = onnx.helper.make_model(graph, producer_name=\"simple-model1\")\n", + "onnx.save(onnx_model1, 'simple_model1.onnx')" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving 'simple_model1.onnx' at http://0.0.0.0:8081\n" + ] + } + ], + "source": [ + "import netron\n", + "netron.start('simple_model1.onnx', port=8081, host=\"0.0.0.0\")" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%%html\n", + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>" + ] + }, + { + "cell_type": 
"markdown", + "metadata": {}, + "source": [ + "Through the visualization it can already be seen that the insertion was successful, but it is still to be checked whether the result remains the same. Therefore the result of the reference function written in the previous section is used and the new model with the input values is simulated. At this point onnxruntime can be used again. The simulation is analogous to the one of the first model in the previous section." + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [], + "source": [ + "sess = rt.InferenceSession(onnx_model1.SerializeToString())\n", + "output = sess.run(None, input_dict)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The output of the manipulated ONNX model is: \n", + "[[12. 9. 14. 8.]\n", + " [ 9. 9. 4. 6.]\n", + " [ 3. 19. 9. 5.]\n", + " [ 8. 22. 7. 2.]]\n", + "\n", + "The output of the reference function is: \n", + "[[12. 9. 14. 8.]\n", + " [ 9. 9. 4. 6.]\n", + " [ 3. 19. 9. 5.]\n", + " [ 8. 22. 7. 
2.]]\n", + "\n", + "The results are the same!\n" + ] + } + ], + "source": [ + "print(\"The output of the manipulated ONNX model is: \\n{}\".format(output[0]))\n", + "print(\"\\nThe output of the reference function is: \\n{}\".format(ref_output))\n", + "\n", + "if (output[0] == ref_output).all():\n", + " print(\"\\nThe results are the same!\")\n", + "else:\n", + " raise Exception(\"Something went wrong, the output of the model doesn't match the expected output!\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/FINN-ModelWrapper.ipynb b/notebooks/basics/2_modelwrapper.ipynb similarity index 89% rename from notebooks/FINN-ModelWrapper.ipynb rename to notebooks/basics/2_modelwrapper.ipynb index ca9c4c6e43584cfcb12c795e5896e726a40fc5d6..6b3cd0337d938c100e0f71e61f8505a5b7377505 100644 --- a/notebooks/FINN-ModelWrapper.ipynb +++ b/notebooks/basics/2_modelwrapper.ipynb @@ -42,17 +42,17 @@ "source": [ "### Create a ModelWrapper instance\n", "\n", - "<font size=\"3\">Here we use a premade ONNX file on disk to load up the ModelWrapper, but this could have been produced from e.g. a trained Brevitas PyTorch model. See [this notebook](brevitas-network-import.ipynb) for more details.</font>" + "<font size=\"3\">Here we use a premade ONNX file on disk to load up the ModelWrapper, but this could have been produced from e.g. a trained Brevitas PyTorch model. 
See [this notebook](3_brevitas_network_import.ipynb) for more details.</font>" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "from finn.core.modelwrapper import ModelWrapper\n", - "model = ModelWrapper(\"LFCW1A1.onnx\")" + "model = ModelWrapper(\"../LFCW1A1.onnx\")" ] }, { @@ -66,7 +66,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -132,7 +132,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -162,7 +162,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -208,7 +208,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 7, "metadata": {}, "outputs": [ { @@ -235,7 +235,7 @@ "\n", "Optionally, the dtype (container datatype) of the tensor can also be specified as third argument. By default it is set to TensorProto.FLOAT. \n", " \n", - "**Important:** dtype should not be confused with FINN data type, which specifies the quantization annotation. See the remarks about FINN-ONNX in [this notebook](finn-basics.ipynb). It is safest to use floating point tensors as the container data type for best compatibility inside FINN.</font>" + "**Important:** dtype should not be confused with FINN data type, which specifies the quantization annotation. See the remarks about FINN-ONNX in [this notebook](0_getting_started.ipynb). It is safest to use floating point tensors as the container data type for best compatibility inside FINN.</font>" ] }, { @@ -249,12 +249,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<font size=\"3\">FINN introduces its [own data types](https://github.com/Xilinx/finn/blob/dev/src/finn/core/datatype.py) because ONNX does not natively support precisions less than 8 bits. FINN is about quantized neural networks, so precision of i.e. 
4 bits, 3 bits, 2 bits or 1 bit are of interest. To represent the data within FINN, float tensors are used with additional annotation to specify the quantized data type of a tensor. The following helper functions are about this quantization annotation.</font>" + "<font size=\"3\">FINN introduces its [own data types](https://github.com/Xilinx/finn/blob/master/src/finn/core/datatype.py) because ONNX does not natively support precisions less than 8 bits. FINN is about quantized neural networks, so precision of i.e. 4 bits, 3 bits, 2 bits or 1 bit are of interest. To represent the data within FINN, float tensors are used with additional annotation to specify the quantized data type of a tensor. The following helper functions are about this quantization annotation.</font>" ] }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -291,7 +291,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 9, "metadata": {}, "outputs": [ { @@ -330,7 +330,7 @@ "metadata": {}, "source": [ "### More helper functions\n", - "<font size=\"3\">ModelWrapper contains more useful functions, if you are interested please have a look at the [Python code](https://github.com/Xilinx/finn/blob/dev/src/finn/core/modelwrapper.py) directly. Additionally, in the folder notebooks/ a Jupyter notebook about transformation passes [FINN-HowToTransformationPass](FINN-HowToTransformationPass.ipynb) and one about analysis passes [FINN-HowToAnalysisPass](FINN-HowToAnalysisPass.ipynb) can be found.</font>" + "<font size=\"3\">ModelWrapper contains more useful functions, if you are interested please have a look at the [Python code](https://github.com/Xilinx/finn/blob/master/src/finn/core/modelwrapper.py) directly. 
" ] }, { diff --git a/notebooks/brevitas-network-import.ipynb b/notebooks/basics/3_brevitas_network_import.ipynb similarity index 81% rename from notebooks/brevitas-network-import.ipynb rename to notebooks/basics/3_brevitas_network_import.ipynb index 404242908bca1c34ea600cc9616817975e35deca..30026e7aaa541641d4068ca0a7a0a3cf7c14088f 100644 --- a/notebooks/brevitas-network-import.ipynb +++ b/notebooks/basics/3_brevitas_network_import.ipynb @@ -17,10 +17,11 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ + "import onnx\n", "import inspect\n", "\n", "def showSrc(what):\n", @@ -38,7 +39,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -103,7 +104,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -298,7 +299,7 @@ ")" ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -322,12 +323,12 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 10, "metadata": {}, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAATB0lEQVR4nO3dfWxd5X0H8O/3XttxXhwSJ8GYJECIYIPSNQUPWoEmCm2agqbAtFGiFZGK1f0DpNKyF8S0FU3ahmAtmraOyW0iwsroulJWptEXSJkoaKAkKCThNbwkEC/EhLzY5MW5vve3P3zo3ODze8w999xzm+f7kSzb9+dz7pNrf3Pt+zvP89DMICInvlLRAxCR5lDYRSKhsItEQmEXiYTCLhKJtmbeWQenWSdmNvMuTwwM1LM0VBg4ecZuDcvl9FNXq9nO3ZZ+bgCwsfrPz/Z2/9yVSt3nztNRHMIxG530m5op7CRXAPh7AGUA3zGzO7yv78RMXMTLs9zliSkQOC8wAGBjY/XfdXuHf+5QIGt+vTz7pNRa9cBB/9wB5bnz3Hr13X3pxcB/Ym09p7r1scH/detFecbWp9bq/jWeZBnAtwB8DsC5AFaRPLfe84lIvrL8zX4hgFfN7HUzOwbgewBWNmZYItJoWcK+EMBbEz7fldz2K0j2k9xIcmMFoxnuTkSyyP3VeDMbMLM+M+trx7S8705EUmQJ+yCAxRM+X5TcJiItKEvYNwA4i+QSkh0ArgXwcGOGJSKNVnfrzczGSN4E4KcYb72tNbPnGzayiJTnzHHr1f376z53acYMt147fNg/QaAtWJ4926277bWS31IMCrQkvfYa2/wf/eqeoXpG1NIy9dnN7BEAjzRoLCKSI10uKxIJhV0kEgq7SCQUdpFIKOwikVDYRSLR1PnssQr1uu3YsWznn5m+RkDt0KFs5w6MvTo8XPe523p73HpoGmmwF+5cI1DuPcW/77d2+acOTQ2uZPue5kHP7CKRUNhFIqGwi0RCYReJhMIuEgmFXSQSar01QXAaaUhgmmmm9lqe5w7IukJreU76yrWAP7021FoLTd3N/D0tgJ7ZRSKhsItEQmEXiYTCLhIJhV0kEgq7SCQUdpFIqM/eBOV53W7d3W0UQHn+fP8ORp1ttU72dzq1Gf4uPaUhfxnrvcvPdOvzvrgztVap+UtBt31tlluvvfCaW88iy9TdVqVndpFIKOwikVDYRSKhsItEQmEXiYTCLhIJhV0kEuqzN0Gojx7y8t2L3Porn1qTWjti/pLGpcD/93tr/vE9Zb9P75nGdre+fNZqt14u+2O3SnqN0/xxm3ftAoBSV5dbr42MuPUiZAo7yR0ARgBUAYyZWV8jBiUijdeIZ/ZPmdneBpxHRHKkv9lFIpE17AbgZyQ3keyf7AtI9pPcSHJjBf7fQSKSn6y/xl9iZoMkTwbwKMmXzOyJiV9gZgMABgBgNrst4/2JSJ0yPbOb2WDyfgjAQwAubMSgRKTx6g47yZkku97/GMByANsaNTARaawsv8b3AHiI4+uOtwH4VzP7SUNGdYIJbXt8YOVvufUvfPQXbr3M9P+zd1b8v5y6Sn4f/bQ2f0551WpufdTGUmv7a/7a6x/7h+f8c9f8H98tf/HbqbVpP97gHtu28FS3nnXN+yLUHXYzex3Axxo4FhHJkVpvIpFQ2EUiobCLREJhF4mEwi4SCZo176K22ey2i3h50+7v18Wql/w2zurZQ279tcp7qbWl7X7r7GDtiFsvw9/SOTRFtob01tysUqd7bMgbzr8bAJ47dkpq7R+/dI17bPm/n/Xrc+e69ep+fwnuvDxj6zFs+yb9pumZXSQSCrtIJBR2kUgo7CKRUNhFIqGwi0RCYReJxK/XUtJ0er7ONM/xst8vtlqG6w0C0zwRuJZhzW1Xu/VT71zr1pfPSO+lh6agjtSqbv2y+//ErS99wO8n1zrTl4t+c4W/HPOG/m+69SWBawj
mlNKvT/jjK/2lpH/j1YVufWzXoFtvRXpmF4mEwi4SCYVdJBIKu0gkFHaRSCjsIpFQ2EUi0fz57KVP13+CJo61kbLOfT6y0t97Y9856ZdLjE13D4W1+Y/pmf8W6KNvecm/gwzO2+Q/F311wRNufVFgGWzPZ09d5tY1n11EWpbCLhIJhV0kEgq7SCQUdpFIKOwikVDYRSLR/Pns3rzz0LzwImWYS5+15zr9Pze59UWPlFNrVvG3ZC7POcmtVw8cdOuh7ahRS/+e1o4edQ996m7/+oKv/o3fZ8+C7R1uvag+ehbBZ3aSa0kOkdw24bZuko+S3J68968wEJHCTeXX+HsBrDjutlsBrDezswCsTz4XkRYWDLuZPQFg33E3rwSwLvl4HYCrGjwuEWmwev9m7zGz3cnHbwPoSftCkv0A+gGgE4G/70QkN5lfjbfxmTSpsynMbMDM+sysrx3+In8ikp96w76HZC8AJO/9bUZFpHD1hv1hANcnH18P4EeNGY6I5CX4NzvJBwBcCmA+yV0Avg7gDgDfJ3kDgJ0A/M2uJwqsU143rw8OZF5XPnS8xwL/5vL8eW69uvfdTOd3j61mu7ahdnQ08AX1j23ulgNuPct89Yr54ypND+wdH6hXh4c/7JByFwy7ma1KKV3e4LGISI50uaxIJBR2kUgo7CKRUNhFIqGwi0RCWzYngls21/ypou59t/kPc3Wf32IKKc/rdk7ut5hCU1iDAq21Umd6i4rT/XWu31ru/LumwNuu+r1aoGU4LXC159hYHSMqlp7ZRSKhsItEQmEXiYTCLhIJhV0kEgq7SCQUdpFItFafPTRN1RNYhtrGitvu2QK97qyq7x6/ROD/Y6Bf7PXBAYCd/vGhPr23XHRbYBnrpb/7mlvfWz3k1ueXZ6bWRkLLlgeuHzghl5IWkRODwi4SCYVdJBIKu0gkFHaRSCjsIpFQ2EUi0dw+O/253eE55fn2q12l9G2RWU6vTUWe2ypXLj7PPXbvR/0+etsR/3uyYN2zbt01w5/PPnDm/W49yyLY3z1wgVv3rl0AgFJXl1uvjYx86DHlTc/sIpFQ2EUiobCLREJhF4mEwi4SCYVdJBIKu0gkmjyfnf767lbJ8a79ufKlwBrmnJk+N5od7e6xb646w61b4Ltwxoo33Pqc9vTH9NaF33KPPafdH3st0M3+0hf9zXxrlj62L5z8Y/fYo+b3+HvK/jUCd+1bmlp76rNL3GNLM/wtl1uxjx4SfGYnuZbkEMltE267neQgyc3J2xX5DlNEsprKr/H3Algxye13m9my5O2Rxg5LRBotGHYzewKAf+2giLS8LC/Q3URyS/Jr/ty0LyLZT3IjyY0VS1+PTETyVW/Y7wGwFMAyALsBfCPtC81swMz6zKyvnf7ihiKSn7rCbmZ7zKxqZjUA3wZwYWOHJSKNVlfYSfZO+PRqANvSvlZEWkOwz07yAQCXAphPcheArwO4lOQyAAZgB4AvT+nezNy522zv8A8PzPv2lM89263v+L15bv03P7M9tTaw5N/dY731ywHgjcp7bn1J+yy3vmss/fhFbf6xIW9Ujrj1f178mFufUUr/nr5S8dd9Py3j2Be0pffCX/6a32c/669OvOevYNjNbNUkN6/JYSwikiNdLisSCYVdJBIKu0gkFHaRSCjsIpFo/pbNzpLMWVprIS/fkHpFLwDgtWv/ya1vGk0fW6i1FtJV8qffrj/iL1W99Wj6ctErZ/ktpAVl/0cg1PYL2V89nFo7u91/3CrmLx1+2Pyfl9Wzh1Jrl33+LvfYaz6y2q13/6H/uLXils56ZheJhMIuEgmFXSQSCrtIJBR2kUgo7CKRUNhFItH8Pruz7XKWrYlDW+i+9Hl/SWXA72VfMC19qubTR/1+8EMH/e2BH7vnk259/sD/uPXKp9PP/8rfnuIee+OCx936R/xZx/jJYX8558Vt6VNkz//pH7nHTt/p3/knr9zi1tec9mRq7aj51zY8vewHbn3Fg1e
6dVymPruIFERhF4mEwi4SCYVdJBIKu0gkFHaRSCjsIpGgBbbFbaSTSvPsE53pG77WjvrbQ7Wdvji1NvbmLvfYe3f+wq2HNoteWJ6RWit721ADWHPQ73Uv63zTrb9dne3WXx/tSa1d3fW8e2y3s9Qz4C8FDQAvHkufrw4Aq//yltTanPv86wdC2hYtdOv8bvp20/cu9fvo71T9PvzMUmAr69Mucet5ecbWY9j2TTp4PbOLREJhF4mEwi4SCYVdJBIKu0gkFHaRSCjsIpFoap99NrvtIl6ey7lD2z3v+cGZbv2pC+5z616/ebezZTIA9Aa2Hg5t2byobbpbH7X0qwRmlTrdY28avMit//w//Ln4J2/2r1CY9l8bUmttvf71B2N73nHrpY52t+5dt1FZ3uce+/N7v+PWz3nqOrd+2h9sdet5ydRnJ7mY5OMkXyD5PMmvJLd3k3yU5Pbkvb8Lg4gUaiq/xo8BuMXMzgXwCQA3kjwXwK0A1pvZWQDWJ5+LSIsKht3MdpvZs8nHIwBeBLAQwEoA65IvWwfgqrwGKSLZfag16EieAeDjAJ4B0GNmu5PS2wAmvUCbZD+AfgDoRPr15SKSrym/Gk9yFoAHAdxsZsMTazb+Kt+kr/SZ2YCZ9ZlZXzv8xQlFJD9TCjvJdowH/X4z+2Fy8x6SvUm9F0D6lpkiUrhg640kMf43+T4zu3nC7XcBeNfM7iB5K4BuM/tT71yh1lvbktPdsYy9sdOte0qdfgsKZ5/hlof+On1K4/knD7rHvjo83613dYy69Z37/UZHz53pbUE+7W/ZXJ7lb5tsx7Jtox2atuzhNP83QRv1HzfQmaYa+Lkvz5/n1qv7Dvj37SyZniev9TaVv9kvBnAdgK0kNye33QbgDgDfJ3kDgJ0ArmnEYEUkH8Gwm9mTANL+i8znChkRaThdLisSCYVdJBIKu0gkFHaRSCjsIpFo6pbNLJdQnpW+LHKoj+71Pqt73/Xve7o/TbS65SW3vuD303u+bwb6vR30l1seDfR8T+30e7peL7s0w79EuTo87NazKs1M7+PXDh3yjw302auBx50d6dcfhHr0oZ+n8mx/ee+8H9d66JldJBIKu0gkFHaRSCjsIpFQ2EUiobCLREJhF4lEU/vsVq25/cfgHGKn9xlaSrq6f78/uIDywt7U2tjrO/yDA330UleXW6+NjPjn95TL9R8LoDznJLdePXDQrdcO+9cYuOcO9apL/r8tON/dO3XB1yfkQc/sIpFQ2EUiobCLREJhF4mEwi4SCYVdJBIKu0gkmtpnDwnNIfZYJdv65iHBXnoGmfroOZ871EcPynNL8BzXZs9yfUCr0jO7SCQUdpFIKOwikVDYRSKhsItEQmEXiYTCLhKJYNhJLib5OMkXSD5P8ivJ7beTHCS5OXm7Iv/hiki9pnJRzRiAW8zsWZJdADaRfDSp3W1mf5ff8ESkUaayP/tuALuTj0dIvghgYd4DE5HG+lB/s5M8A8DHATyT3HQTyS0k15Kcm3JMP8mNJDdWUP8yQSKSzZTDTnIWgAcB3GxmwwDuAbAUwDKMP/N/Y7LjzGzAzPrMrK8d/t5dIpKfKYWdZDvGg36/mf0QAMxsj5lVzawG4NsALsxvmCKS1VRejSeANQBeNLNvTrh94nKrVwPY1vjhiUijTOXV+IsBXAdgK8nNyW23AVhFchkAA7ADwJdzGaGINMRUXo1/EgAnKT3S+OGISF50BZ1IJBR2kUgo7CKRUNhFIqGwi0RCYReJhMIuEgmFXSQSCrtIJBR2kUgo7CKRUNhFIqGwi0RCYReJBC3PLXWPvzPyHQA7J9w0H8Depg3gw2nVsbXquACNrV6NHNvpZrZgskJTw/6BOyc3mllfYQNwtOrYWnVcgMZWr2aNTb/Gi0RCYReJRNFhHyj4/j2tOrZWHRegsdWrKWMr9G92EWmeop/ZRaRJFHaRSBQSdpIrSL5M8lWStxYxhjQkd5DcmmxDvbHgsawlOURy24T
bukk+SnJ78n7SPfYKGltLbOPtbDNe6GNX9PbnTf+bnWQZwCsAPgNgF4ANAFaZ2QtNHUgKkjsA9JlZ4RdgkPwdAO8BuM/MzktuuxPAPjO7I/mPcq6Z/VmLjO12AO8VvY13sltR78RtxgFcBWA1CnzsnHFdgyY8bkU8s18I4FUze93MjgH4HoCVBYyj5ZnZEwD2HXfzSgDrko/XYfyHpelSxtYSzGy3mT2bfDwC4P1txgt97JxxNUURYV8I4K0Jn+9Ca+33bgB+RnITyf6iBzOJHjPbnXz8NoCeIgczieA23s103DbjLfPY1bP9eVZ6ge6DLjGz8wF8DsCNya+rLcnG/wZrpd7plLbxbpZJthn/pSIfu3q3P8+qiLAPAlg84fNFyW0twcwGk/dDAB5C621Fvef9HXST90MFj+eXWmkb78m2GUcLPHZFbn9eRNg3ADiL5BKSHQCuBfBwAeP4AJIzkxdOQHImgOVova2oHwZwffLx9QB+VOBYfkWrbOOdts04Cn7sCt/+3Mya/gbgCoy/Iv8agD8vYgwp4zoTwHPJ2/NFjw3AAxj/ta6C8dc2bgAwD8B6ANsBPAagu4XG9i8AtgLYgvFg9RY0tksw/iv6FgCbk7crin7snHE15XHT5bIikdALdCKRUNhFIqGwi0RCYReJhMIuEgmFXSQSCrtIJP4PSkcHEGlbZOgAAAAASUVORK5CYII=\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAARX0lEQVR4nO3dfYyVZXrH8d/FoDAw8iYRCaisG/5QqmUbgk1KyOKmxlUMbKJm/aPauAmarMmqTVqz/UOSaqJVa/pH3YStL9CsmiWoq0a7a82mWo1GNFQQW1CULGR4E5H3t+HqH/NgZ3We6549z3nOc9z7+0kmM3Ouec65OTM/zsv13Pdt7i4Af/xGNT0AAJ1B2IFMEHYgE4QdyARhBzIxupM3Zma89Z+ZUaPKH09OnTpV23VXvf6enp6wPjAw0PJ1183dbbjLK4XdzK6U9M+SeiT9q7vfV+X6cmU27O/mS6k/6ip/eKNHx38CqcCk6r29vaW1Q4cOhcem9PX1hfUDBw6U1lIt50mTJoX1zz77LKx3o5afxptZj6R/kfR9SRdLusHMLm7XwAC0V5XX7PMlfeTuW9z9uKSnJS1pz7AAtFuVsM+Q9Lsh328rLvs9ZrbMzNaa2doKtwWgotrfoHP3FZJWSLxBBzSpyiP7dknnDfl+ZnEZgC5UJezvSJptZt8yszMl/VDS8+0ZFoB2a/lpvLufNLPbJP1ag623x9z9g7aNLCPjx48P6wcPHmz5useMGRPWjx07FtZTbcFx48aF9ai9lmoppqSOj9prqT76vn37WhpTN6v0mt3dX5L0UpvGAqBGnC4LZIKwA5kg7EAmCDuQCcIOZIKwA5mwTq4um+vpsqled6qXffTo0bA+duzYlo9Nia676vWfffbZYb3qNNLofp06dWp47O7du8N6amrwyZMnw3qdyuaz88gOZIKwA5kg7EAmCDuQCcIOZIKwA5mg9fYNkGrNVfkd1nnddUtNDa6yem1q6m5qanCTS03TegMyR9iBTBB2IBOEHcgEYQcyQdiBTBB2IBP02TvgrLPOCuvRbqOSNHHixLB+4sSJ0lpqN9LUFNbPP/88rC9YsCCs33rrraW1VC/6jjvuCOtbt24N601OM20SfXYgc4QdyARhBzJB2IFMEHYgE4QdyARhBzJBn/0b4JFHHgnrUS871Wuuuox1b29vWI+ktk2+5JJLwvqmTZvC+vHjx0trZ5xxRnhsdO6ClP53HzlyJKzXqazPXmnLZj
P7VNIBSQOSTrr7vCrXB6A+lcJeWOTue9pwPQBqxGt2IBNVw+6SfmNm75rZsuF+wMyWmdlaM1tb8bYAVFD1afwCd99uZudIesXM/sfdXxv6A+6+QtIKiTfogCZVemR39+3F512SnpU0vx2DAtB+LYfdzMab2Vmnv5Z0haQN7RoYgPaq8jR+mqRniz7taElPuvu/t2VUf2RSWzYvWrQorF922WVhPeqVHzx4MDw21W/u6+sL66nzNKI566m11x999NGWr1uS7rzzztLaW2+9FR5b93bSTWg57O6+RdKftnEsAGpE6w3IBGEHMkHYgUwQdiAThB3IBFNcu0Bqqubs2bPD+v79+0trEyZMCI+NpoFK6SmwVbZ8TrX9UlJLcO/du7e0tnTp0vDYdevWhfVUSzLV8qwTS0kDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJdiw42TFRT7fOfnBK6thU/ZZbbgnrq1atCuszZ85s+bZTffZ77rknrK9evTqsn3nmmaW1K664Ijz2wQcfDOuprbCj2168eHF47LZt28L6nj3fvDVWeWQHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiATHZ/Pnup3Rzo51naqOvd54cKFYf2iiy4qrY0bNy48dvTo+FSLNWvWhPUtW7aE9SpSyz3PmTMnrKfu90jq75T57AC6FmEHMkHYgUwQdiAThB3IBGEHMkHYgUx0vM8+alT5/y9V54XXqcpc+lOnTlW67eg+S9VPnjwZHjt+/PiwfujQobCe2o46+p2l5tJfffXVYf3pp58O61X67Kk17VP3a5Na7rOb2WNmtsvMNgy5bIqZvWJmm4vPk9s5WADtN5Kn8U9IuvIrl90l6VV3ny3p1eJ7AF0sGXZ3f03SV/fRWSJpZfH1SknxXjoAGtfqGnTT3L2/+HqHpGllP2hmyyQta/F2ALRJ5QUn3d2jDRvdfYWkFRIbOwJNarX1ttPMpktS8XlX+4YEoA6thv15STcVX98k6VftGQ6AuiT77Gb2lKTvSpoqaaekuyU9J+mXks6XtFXS9e5evhn2/19XbU/jq64bX7UeSfVkU3uoR/uvV9Xb2xvWjxw5EtZT5wBUOcfgwgsvDOsff/xxy9edGldqTfqUw4cPVzq+irI+e/I1u7vfUFL6XqURAegoTpcFMkHYgUwQdiAThB3IBGEHMsGWzYVUC3JgYCCsR3p6esJ61WWHozZRqsWUmsKakrr+aNvkqCZJixYtamlMp0W/0xMnToTHpqa4Vvl7aAqP7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZKKr+ux1budcdTnnKuq+7QMHDpTWUv3iVK87dXyqTx8tF51axvq6664L60ePHg3rY8eOLa2l+uyp31mTWzK3ikd2IBOEHcgEYQcyQdiBTBB2IBOEHcgEYQcy0fE+ezS3u5t75dGSyanllFPq3Fb50ksvDY+dM2dOWE8tJf3cc8+F9UjUB5ekhQsXhvUqW3inlqGOzl2Qqi/B3QQe2YFMEHYgE4QdyARhBzJB2IFMEHYgE4QdyETH++zRnPU6++ipufKped1RT3j06PhuXLp0aVhPHb9kyZKwPmbMmNLa3Llzw2MnTZoU1lO97Ndff73l42fPnh0em1qbPdXrXr9+fWnt8ssvD4+N7lOpO/voKclHdjN7zMx2mdmGIZctN7PtZrau+Liq3mECqGokT+OfkHTlMJc/7O5zi4+X2jssAO2WDLu7vyZpbwfGAqBGVd6gu83M3i+e5k8u+yEzW2Zma81sbYXbAlBRq2H/maRvS5orqV/SQ2U/6O4r3H2eu89r8bYAtEFLYXf3ne4+4O6nJP1c0vz2DgtAu7UUdjObPuTbH0jaUPazALqDpfqoZvaUpO9Kmippp6S7i+/nSnJJn0q6xd37kzdmFt5Yqt+cmvcdmTVrVli/5pprwvrixYtLa6l516l526m509H+61K8hnlfX194bErVed3R7/SLL74Ij504cWJYT9m8eX
NpbdWqVeGxDz1U+spUUnf32d192JNKkifVuPsNw1z8aOURAegoTpcFMkHYgUwQdiAThB3IBGEHMpFsvbX1xsw8Wna5zimud999d1hfvnx5WN+zZ09pberUqa0M6UuprYf37o2nJkT1Cy64IDw21RZMbdmccuzYsdJaahpp6u8h1YqNpi2ntlx++eWXw/rNN98c1pvc0rms9cYjO5AJwg5kgrADmSDsQCYIO5AJwg5kgrADmeh4nz2qV9maODXVMtX3rLLt8q5du8L61q1bw/oDDzwQ1levXh3W580rXwTo4YcfDo9Nbdk8eXLpimOSpG3btoX16Hf6xBNPhMd+8sknYf3aa68N69HU46rTa1988cWwnpoyXSf67EDmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZKKjffZRo0Z5ND/6+PHj4fHnnHNOaW337t3hsak+e2rudNQvTm0HvWnTprA+ZcqUsJ5atjha7vn8888Pj03NZ08t771v376wfuONN5bWXnjhhfDYlNQ6AtFy0YsWLQqPTa0xkLpfUst/14k+O5A5wg5kgrADmSDsQCYIO5AJwg5kgrADmeiq+exVpPqeK1euDOvXX399y9d/+PDh8Nhx48aF9dS2yKl5/gMDA6W11Lrvb775Zlh/8sknw/q6devC+htvvFFaS51fkOrhp37n0Xkb8+fPD499++23w/rjjz8e1lPrytep5T67mZ1nZr81s41m9oGZ/aS4fIqZvWJmm4vP8SoHABo1kqfxJyX9jbtfLOnPJf3YzC6WdJekV919tqRXi+8BdKlk2N29393fK74+IOlDSTMkLZF0+rnxSklL6xokgOriFz1fYWazJH1H0tuSprl7f1HaIWlayTHLJC1rfYgA2mHE78abWZ+kNZJud/f9Q2s++C7fsG++ufsKd5/n7uWrIgKo3YjCbmZnaDDov3D3Z4qLd5rZ9KI+XVK8xCqARiVbbzY4f3OlpL3ufvuQyx+Q9Jm732dmd0ma4u5/m7iu8MbOPffccCw7duwI65Fo+15JmjlzZli/9957S2szZswIj01tuZzaujjaLlqS7r///tLaxo0bw2NTU1xT2yKnpKYtR1JtwxMnToT1aOpx6u9+woQJYb3qlOk6lbXeRvKa/S8k/ZWk9WZ2uqn6U0n3Sfqlmf1I0lZJcaMaQKOSYXf3/5JU9l/k99o7HAB14XRZIBOEHcgEYQcyQdiBTBB2IBMdneLa09PjUV83NVU06n3u37+/tCZJfX19YT3VN416vlX6vVK655s6RyDqZad6+MeOHQvrVUW/79Ryzampwam/lyq/s5SqY6sTS0kDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJrlpKOjWHOOqlp5YVrjove/r06aW1/v7+0tpI9Pb2hvXUls11XndqGetDhw6F9SpzylNGjYofq6rMKW/6/IQq6LMDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJruqzA6iOPjuQOcIOZIKwA5kg7EAmCDuQCcIOZIKwA5lIht3MzjOz35rZRjP7wMx+Uly+3My2m9m64uOq+ocLoFXJk2rMbLqk6e7+npmdJeldSUs1uB/7QXd/cMQ3xkk1QO3KTqoZyf7s/ZL6i68PmNmHkma0d3gA6vYHvWY3s1mSviPp7eKi28zsfTN7zMwmlxyzzMzWmtnaSiMFUMmIz403sz5J/ynpXnd/xsymSdojySX9gwaf6t+cuA6exgM1K3saP6Kwm9kZkl6U9Gt3/6dh6rMkvejuf5K4HsIO1KzliTA2uDzoo5I+HBr04o27034gaUPVQQKoz0jejV8g6XVJ6yWdXpv3p5JukDRXg0/jP5V0S/FmXnRdPLIDNav0NL5dCDtQP+azA5kj7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZIKwA5kg7EAmkgtOttkeSVuHfD+1uKwbdevYunVcEmNrVTvHdkFZoaPz2b9242Zr3X1eYw
MIdOvYunVcEmNrVafGxtN4IBOEHchE02Ff0fDtR7p1bN06LomxtaojY2v0NTuAzmn6kR1AhxB2IBONhN3MrjSz/zWzj8zsribGUMbMPjWz9cU21I3uT1fsobfLzDYMuWyKmb1iZpuLz8PusdfQ2LpiG+9gm/FG77umtz/v+Gt2M+uRtEnSX0raJukdSTe4+8aODqSEmX0qaZ67N34ChpktlHRQ0qrTW2uZ2T9K2uvu9xX/UU5297/rkrEt1x+4jXdNYyvbZvyv1eB9187tz1vRxCP7fEkfufsWdz8u6WlJSxoYR9dz99ck7f3KxUskrSy+XqnBP5aOKxlbV3D3fnd/r/j6gKTT24w3et8F4+qIJsI+Q9Lvhny/Td2137tL+o2ZvWtmy5oezDCmDdlma4ekaU0OZhjJbbw76SvbjHfNfdfK9udV8Qbd1y1w9z+T9H1JPy6ernYlH3wN1k29059J+rYG9wDsl/RQk4MpthlfI+l2d98/tNbkfTfMuDpyvzUR9u2Szhvy/czisq7g7tuLz7skPavBlx3dZOfpHXSLz7saHs+X3H2nuw+4+ylJP1eD912xzfgaSb9w92eKixu/74YbV6futybC/o6k2Wb2LTM7U9IPJT3fwDi+xszGF2+cyMzGS7pC3bcV9fOSbiq+vknSrxocy+/plm28y7YZV8P3XePbn7t7xz8kXaXBd+Q/lvT3TYyhZFwXSvrv4uODpscm6SkNPq07ocH3Nn4k6WxJr0raLOk/JE3porH9mwa39n5fg8Ga3tDYFmjwKfr7ktYVH1c1fd8F4+rI/cbpskAmeIMOyARhBzJB2IFMEHYgE4QdyARhBzJB2IFM/B+tIjCppYWKvAAAAABJRU5ErkJggg==\n", "text/plain": [ "<Figure size 432x288 with 1 Axes>" ] @@ -347,12 +348,12 @@ "input_tensor = onnx.load_tensor_from_string(raw_i)\n", "input_tensor_npy = nph.to_array(input_tensor)\n", "input_tensor_pyt = torch.from_numpy(input_tensor_npy).float()\n", - "imgplot = plt.imshow(input_tensor_npy.reshape(28,28))" + "imgplot = plt.imshow(input_tensor_npy.reshape(28,28), cmap='gray')" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -362,7 +363,7 @@ " 1.1795e-04, 5.0158e-05, 1.0517e-01, 2.4597e-05])" ] }, - "execution_count": 7, + "execution_count": 11, "metadata": {}, "output_type": "execute_result" } @@ -377,12 +378,12 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 12, "metadata": {}, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEICAYAAABS0fM3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAa3klEQVR4nO3debxdZXn28d9FEgRCBCFxgASIGCxxeAuNgKJCRSqoQB1qwRcrVkXfFkVBK1oriIqvI7WKVgQnZCgC2qABxFdE3yKBMBsmQwQSQAlzGEoYrv6xni2bk3P2WSRZ68BZ1/fz2Z+z13jfe59z1r3X86z1bNkmIiK6a62xTiAiIsZWCkFERMelEEREdFwKQUREx6UQRER0XApBRETHpRBE1CTpMEk/KM83k3SvpAmrsJ+PSTpmzWc4MOaOkn5Xcv7rNmPHk18KwTgk6XpJrx5m/s6SHi0Hg97j9L7lW0n6oaTbJN0t6XJJB63KwW5I3AMkLZD0oKTvPsFt3yLpPEn3S/rlKOv2v77lkq6R9I7VyX0ktm+0vb7tR2rktHTItkfYflcTeQ1wOPC1kvOPV3dnkr4r6dNrIK94Ekgh6J6by8Gg99gDQNKWwHxgCfAi2xsAfwPMAaasbkzg08C3V2HbO4B/Bf5v3Vi21weeDnwE+Jak2UNXkjRxFXJ5KtscWLgqG3bwveqcFILo+SRwnu2DbN8CYPsa22+1fdfQlSX9paQr+qbPlnRh3/Sve00Qtk8rn0JvH2Y/z5D0E0nLJN1Znk/vLbf9c9snUxWT2lz5MXAnMFvSFpIs6Z2SbgR+UeLvUM447pJ0maSd+3KbKenccnZxNjC1b1lvfxPL9EaSviPp5vI6fixpMnAGsEnfGdgm/U1MZds9JS0sOfxS0tZ9y66X9KFydna3pP+QtE5ZNrW8X3dJuqO85yv9T0u6DngucHrJ4Wklj7llu0WS3t23/mGSTpH0A0n3APsNeq/73ot3SFpSXv97Jb2k5H2XpK/1rb+lpF9Iur2cfR4vacO+5dtKuqS87z8sr/nTfctfL+nSst/zJL14UH4xuhSC6Hk1cMoTWP98YFY5GE0CXkx1wJsiaV2qM4lf19jPWsB3qD6xbgY8AHxt4BY1SFpL0huADYEr+hbtBGwNvEbSpsBPqc5WNgI+BJwqaVpZ9wTgIqoC8Cng7QNCHgesB7wAeCZwpO37gN15/FnY4wqapK2AE4EPANOAeVQH7LX7VnsLsBswk+p93q/MPxhYWrZ7FvAxYKUxY2xvCdwI7FFyeBA4qWy7CfBm4AhJr+rbbC+qv4cNgeMHvO5+2wOzgL+lOov7Z6q/qxcAb5G0U+9lA58tsbcGZgCHlfdjbeBHwHepficnAm/oBZC0DdWZ5XuAjYFvAnMlPa1mjjGMFILu2aR8kuo93lLmbwzcUncnth8ALgReCfwFcBnwX8COwA7A72yvdAYwzH5ut32q7fttLwc+Q3WwXlWbSLoLuA04FHib7Wv6lh9m+76S/77APNvzbD9q+2xgAfBaSZsBLwH+xfaDtn8FnM4wJD2H6oD/Xtt32n7I9rk18/1b4Ke2z7b9EPBFYF3gZX3r/Jvtm23fUXL48zL/IeA5wOYl5q9dY/AwSTOofk8fsf3fti8FjgH+rm+139j+cXlfHqj5Wj5V9vcz4D7gRNu32r6J6kPBNgC2F5XX+6DtZcCXeex3vgMwsbzmh2yfBlzQF2N/4Ju259t+xPb3gAfLdrGK0vbXPTfbnj7M/NupDirDkvTvVAdOgCNsHwGcC+xM9cnyXKpmmJ2o/jFrHQglrQccSfWJ9xll9hRJE0briB3BSK+vZ0nf882Bv5G0R9+8ScA5VJ9W7yyf6ntuoPr0OtQM4A7bd65CvpuU/QJg+1FJS4BN+9b5Q9/z+8s2AF+g+iT9M0kAR9uu05eyScl3ed+8G6jO4nqW8MT9se/5A8NMrw8g6VnAV4BXUPU/rUX1t9P
L7aYhBW3o7+ztkt7XN29tHntPYhXkjCB6fg68aaSFtt/b17xxRJndKwSvLM/PpSoEO1GzEFA1bzwf2N7208u+oGo+aMLQA8xxtjfse0wuB9NbgGeUdv6ezUbY5xJgo/527hHiDedmqoMbAKqO6DOAm0Z9IfZy2wfbfi6wJ3CQpF1G267E3EhS/0UAmw2J2eSwxEeU/b+o/M735bHf9y3ApuV96OkvvkuAzwz5na1n+8QG8x33UgjGr0mS1ul7jHb2dyjwMklfkPRsAEnPKx2Gwx3gAM6jOohvB1xgeyHVQW174Fe9lSRNLB2cE4AJQ/KZQvVp8S5JG5U86Nt2Qtl2IrBW2XbSE3gfBvkBsIek1/TiqLrcc7rtG6iaiT4paW1JLwf2GG4npXP9DODrqjq/J0nqFbQ/AhtL2mCEHE4GXidpl/K6DqY6ozpvtORLp+nzykHzbuAR4NHRtrO9pOz/s+U1vxh4Z3k/2jAFuBe4u/TTfLhv2W+oXscB5e9mL6q/r55vAe+VtL0qkyW9bkhRiycohWD8mkd1gO09Dhu0su3rgJcCWwALJd0NnEp1MFw+wjb3ARcDC22vKLN/A9xg+9a+VT9ecjiE6tPfA2UeVJ2K61K16Z8PnDkkzNvK+t+gakp4gOpgsNrKAXEvqk7WZVSfNj/MY/8Xb6UqandQFajvD9jd26ja7K8GbqXq/MX21VQdnotLn8zjmjBK/8W+wFep3oM9qDp1VzC6WVRncvdSve9ft31Oje0A9qH6Xd9M1Tl7qO2f19x2dX0S2JaqeP0UOK23oLzuN1IVpruo3pufUBVHbC8A3k11QcGdwCJGuaopRqd8MU1EPJlJmg/8u+3vjHUu41XOCCLiSUXSTpKeXZqG3k51yezQM8VYg3LVUEQ82Tyfqu9kMrAYeHPvJsdoRpqGIiI6Lk1DEREd95RrGpo6daq32GKLsU4jIuIp5aKLLrrN9rThlj3lCsEWW2zBggULxjqNiIinFEk3jLQsTUMRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHfeUu7M4nrgjz7628Rgf3HWrxmNERDNyRhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxjRYCSbtJukbSIkmHDLN8M0nnSLpE0uWSXttkPhERsbLGCoGkCcBRwO7AbGAfSbOHrPZx4GTb2wB7A19vKp+IiBhek2cE2wGLbC+2vQI4CdhryDoGnl6ebwDc3GA+ERExjCYLwabAkr7ppWVev8OAfSUtBeYB7xtuR5L2l7RA0oJly5Y1kWtERGeNdWfxPsB3bU8HXgscJ2mlnGwfbXuO7TnTpk1rPcmIiPGsyUJwEzCjb3p6mdfvncDJALZ/A6wDTG0wp4iIGKLJQnAhMEvSTElrU3UGzx2yzo3ALgCStqYqBGn7iYhoUWOFwPbDwAHAWcBVVFcHLZR0uKQ9y2oHA++WdBlwIrCfbTeVU0RErGxikzu3PY+qE7h/3if6nl8J7NhkDhERMdhYdxZHRMQYSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI4btRBIelEbiURExNioc0bwdUkXSPoHSRs0nlFERLRq1EJg+xXA/wZmABdJOkHSro1nFhERrajVR2D7d8DHgY8AOwH/JulqSW9sMrmIiGhenT6CF0s6ErgKeBWwh+2ty/MjG84vIiIaNrHGOl8FjgE+ZvuB3kzbN0v6eGOZRUREK+o0Df3I9nH9RUDSgQC2j2sss4iIaEWdQvB3w8zbbw3nERERY2TEpiF
J+wBvBWZKmtu3aApwR9OJRUREOwb1EZwH3AJMBb7UN385cHmTSUVERHtGLAS2bwBuAF7aXjoREdG2QU1D/9/2yyUtB9y/CLDtpzeeXURENG7QGcHLy88p7aUTERFtG3RGsNGgDW2nwzgiYhwY1Fl8EVWTkIZZZuC5jWQUERGtGtQ0NLPNRCIiYmyMeEOZpD8rP7cd7lFn55J2k3SNpEWSDhlhnbdIulLSQkknrNrLiIiIVTWoaeggYH8efw9Bj6kGnRuRpAnAUcCuwFLgQklzbV/Zt84s4KPAjrbvlPTMJ5h/RESspkFNQ/uXn3+5ivveDlhkezGApJOAvYAr+9Z5N3CU7TtLrFtXMVZERKyiOsNQryPpIEmnSTpV0gckrVNj35sCS/qml5Z5/bYCtpL0X5LOl7TbCDnsL2mBpAXLli2rEToiIuqqM+jc94EXUA1H/bXyfE2NOjoRmAXsDOwDfEvShkNXsn207Tm250ybNm0NhY6ICKj3fQQvtD27b/ocSVeOuPZjbqL6esue6WVev6XAfNsPAb+XdC1VYbiwxv4jImINqHNGcLGkHXoTkrYHFtTY7kJglqSZktYG9gbmDlnnx1RnA0iaStVUtLjGviMiYg0ZdGfxFVRXB00CzpN0Y5neHLh6tB3bfljSAcBZwATg27YXSjocWGB7bln2V+UM4xHgw7ZvX90XFRER9Q1qGnr96u7c9jxg3pB5n+h7bqrLVA9a3VgREbFqRhuG+k/KNf51rhaKiIinkDqXj+4p6XfA74FzgeuBMxrOKyIiWlKns/hTwA7AtWX8oV2A8xvNKiIiWlOnEDxUOnDXkrSW7XOAOQ3nFRERLalzH8FdktYHfg0cL+lW4L5m04qIiLbUOSPYC3gA+ABwJnAdsEeTSUVERHtGPSOwfZ+kZ1MNIncHcFau9Y+IGD/qXDX0LuAC4I3Am4HzJf1904lFREQ76vQRfBjYpncWIGlj4Dzg200mFhER7ajTR3A7sLxvenmZFxER48CgsYZ6wz4sAuZL+k+qsYb2Ai5vIbeIiGjBoKahKeXndeXR85/NpRMREW0bNNbQJ/uny70E2L636aQiIqI9da4aeqGkS4CFwEJJF0l6QfOpRUREG+p0Fh8NHGR7c9ubAwcD32o2rYiIaEudQjC5jC8EgO1fApMbyygiIlpV5z6CxZL+hce+sH5f8nWSERHjRp0zgr8HpgGnAacCU8u8iIgYBwaeEUiaAPyz7fe3lE9ERLRs4BmB7UeAl7eUS0REjIE6fQSXSJoL/JC+7yGwfVpjWUVERGvqFIJ1qMYWelXfPFP1GURExFNcrdFHbd/WeCYRETEmRuwjkLSHpGXA5ZKWSnpZi3lFRERLBnUWfwZ4he1NgDcBn20npYiIaNOgQvCw7asBbM/nsdFIIyJiHBnUR/DMvu8kWGna9pebSysiItoyqBB8i8efBQydjoiIcaD29xFERMT4VGesoYiIGMdSCCIiOi6FICKi40bsIxhyxdBKctVQRMT4MOiqod4VQs8HXgLMLdN7ABc0mVRERLRn1KuGJP0K2Nb28jJ9GPDTVrKLiIjG1ekjeBawom96RZkXERHjQJ3RR78PXCDpR2X6r4HvNZdSRES0adRCYPszks4AXlFmvcP2Jc2mFRERbal7+eh6wD22vwIslTSzzkaSdpN0jaRFkg4ZsN6bJFnSnJr5RETEGjJqIZB0KPAR4KNl1iTgBzW2mwAcBewOzAb2kTR7mPWmAAcC8+unHRERa0qdM4I3AHtSvq/Y9s3UG3xuO2CR7cW2VwAnAXsNs96ngM8B/10r44iIWKPqFIIVtk31PcVImlxz35sCS/qml5Z5fyJpW2CG7YGXo0raX9ICSQuWLVtWM3xERNRRpxCcLOmbwIaS3g38HDhmdQNLWgv4MnDwaOvaPtr2HNtzpk2btrqhIyKiT52rhr4oaVfgHqq7jD9h++wa+74JmNE3Pb3M65kCvBD4pSSAZwNzJe1pe0HN/CM
iYjWNWggkfc72R4Czh5k3yIXArHKF0U3A3sBbewtt3w1M7dvnL4EPpQhERLSrTtPQrsPM2320jWw/DBwAnAVcBZxse6GkwyXt+cTSjIiIpgwaffT/AP8AbCnp8r5FU4Dz6uzc9jxg3pB5nxhh3Z3r7DMiItasQU1DJwBnAJ8F+m8GW277jkazioiI1ozYNGT7btvXA18B7rB9g+0bgIclbd9WghER0aw6fQTfAO7tm763zIuIiHGgTiFQuaEMANuPUm/U0oiIeAqoUwgWS3q/pEnlcSCwuOnEIiKiHXUKwXuBl1HdC7AU2B7Yv8mkIiKiPXXuLL6V6mawiIgYhwbdR/BPtj8v6auUAef62X5/o5lFREQrBp0RXFV+ZsiHiIhxbMRCYPv08jPfTxwRMY4Naho6nWGahHpsZ7ygiIhxYFDT0BfLzzdSDRHd+3rKfYA/NplURES0Z1DT0LkAkr5ku/9L5U+XlH6DiIhxos59BJMlPbc3Ub5foO7XVUZExJNcnaEiPkj1LWKLAQGbA+9pNKuIiGhNnRvKzpQ0C/izMutq2w82m1ZERLRl1KYhSesBHwYOsH0ZsJmk1zeeWUREtKJOH8F3gBXAS8v0TcCnG8soIiJaVacQbGn788BDALbvp+oriIiIcaBOIVghaV3KzWWStgTSRxARMU7UuWroUOBMYIak44Edgf2aTCoiItozsBBIEnA11d3FO1A1CR1o+7YWcouIiBYMLAS2LWme7RcBP20pp4iIaFGdPoKLJb2k8UwiImJM1Okj2B7YV9L1wH1UzUO2/eImE4uIiHbUKQSvaTyLiIgYM4O+j2Adqi+ufx5wBXCs7YfbSiwiItoxqI/ge8AcqiKwO/ClVjKKiIhWDWoaml2uFkLSscAF7aQUERFtGnRG8FDvSZqEIiLGr0FnBP9L0j3luYB1y3TvqqGnN55dREQ0btBXVU5oM5GIiBgbdW4oi4iIcSyFICKi41IIIiI6LoUgIqLjUggiIjqu0UIgaTdJ10haJOmQYZYfJOlKSZdL+n+SNm8yn4iIWFljhUDSBOAoquEpZgP7SJo9ZLVLgDllJNNTgM83lU9ERAyvyTOC7YBFthfbXgGcBOzVv4Ltc2zfXybPB6Y3mE9ERAyjyUKwKbCkb3ppmTeSdwJnDLdA0v6SFkhasGzZsjWYYkREPCk6iyXtSzXS6ReGW277aNtzbM+ZNm1au8lFRIxzdb6YZlXdBMzom55e5j2OpFcD/wzsZPvBBvOJiIhhNHlGcCEwS9JMSWsDewNz+1eQtA3wTWBP27c2mEtERIygsUJQhq4+ADgLuAo42fZCSYdL2rOs9gVgfeCHki6VNHeE3UVEREOabBrC9jxg3pB5n+h7/uom40dExOieFJ3FERExdlIIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi41IIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi41IIIiI6LoUgIqLjUggiIjqu0S+miYho05FnX9t4jA/uulXjMdqWM4KIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjmu0EEjaTdI1khZJOmSY5U+T9B9l+XxJWzSZT0RErKyxQiBpAnAUsDswG9hH0uwhq70TuNP284Ajgc81lU9ERAxvYoP73g5YZHsxgKSTgL2AK/vW2Qs4rDw/BfiaJNl2Ewkdefa1Tez2cT6461aNx4iIWJOaLASbAkv6ppcC24+0ju2HJd0NbAzc1r+SpP2B/cvkvZKuaSTj4U0dms8gB41h7DUsrzuxE3sYa/Bvve3XvflIC5osBGuM7aOBo8citqQFtuckdmIndmKPl9hDNdlZfBMwo296epk37DqSJgIbALc3mFNERAzRZCG
4EJglaaaktYG9gblD1pkLvL08fzPwi6b6ByIiYniNNQ2VNv8DgLOACcC3bS+UdDiwwPZc4FjgOEmLgDuoisWTzZg0SSV2Yid2YrdF+QAeEdFtubM4IqLjUggiIjouhWAEow2P0XDsb0u6VdJvW447Q9I5kq6UtFDSgS3GXkfSBZIuK7E/2VbsvhwmSLpE0k/GIPb1kq6QdKmkBS3H3lDSKZKulnSVpJe2FPf55fX2HvdI+kAbsUv8D5a/td9KOlHSOi3GPrDEXdjmax6R7TyGPKg6t68DngusDVwGzG4x/iuBbYHftvy6nwNsW55PAa5t63UDAtYvzycB84EdWn79BwEnAD9pM26JfT0wte24Jfb3gHeV52sDG45BDhOAPwCbtxRvU+D3wLpl+mRgv5ZivxD4LbAe1QU7PweeNxa/+94jZwTD+9PwGLZXAL3hMVph+1dUV1G1yvYtti8uz5cDV1H9w7QR27bvLZOTyqO1KxkkTQdeBxzTVswnA0kbUH3wOBbA9grbd41BKrsA19m+ocWYE4F1yz1M6wE3txR3a2C+7fttPwycC7yxpdjDSiEY3nDDY7RyQHyyKCPBbkP1ybytmBMkXQrcCpxtu7XYwL8C/wQ82mLMfgZ+JumiMqRKW2YCy4DvlGaxYyRNbjF+z97AiW0Fs30T8EXgRuAW4G7bP2sp/G+BV0jaWNJ6wGt5/M23rUshiJVIWh84FfiA7Xvaimv7Edt/TnUX+naSXthGXEmvB261fVEb8UbwctvbUo3W+4+SXtlS3IlUzZDfsL0NcB/Qdp/Y2sCewA9bjPkMqrP8mcAmwGRJ+7YR2/ZVVCMt/ww4E7gUeKSN2CNJIRheneExxiVJk6iKwPG2TxuLHErTxDnAbi2F3BHYU9L1VM2Ar5L0g5ZiA3/6hIrtW4EfUTVPtmEpsLTv7OsUqsLQpt2Bi23/scWYrwZ+b3uZ7YeA04CXtRXc9rG2/8L2K4E7qfrjxkwKwfDqDI8x7kgSVVvxVba/3HLsaZI2LM/XBXYFrm4jtu2P2p5uewuq3/UvbLfy6RBA0mRJU3rPgb+iaj5onO0/AEskPb/M2oXHDxXfhn1osVmouBHYQdJ65e9+F6o+sVZIemb5uRlV/8AJbcUezlNi9NG2eYThMdqKL+lEYGdgqqSlwKG2j20h9I7A24ArSls9wMdsz2sh9nOA75UvNFoLONl265dxjpFnAT+qjkdMBE6wfWaL8d8HHF8+9CwG3tFW4FL4dgXe01ZMANvzJZ0CXAw8DFxCu0M+nCppY+Ah4B/HqIP+TzLEREREx6VpKCKi41IIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi4/4HEHMv4f97kiwAAAAASUVORK5CYII=\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEICAYAAABS0fM3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAa3klEQVR4nO3debxdZXn28d9FEgRCBCFxgASIGCxxeAuNgKJCRSqoQB1qwRcrVkXfFkVBK1oriIqvI7WKVgQnZCgC2qABxFdE3yKBMBsmQwQSQAlzGEoYrv6xni2bk3P2WSRZ68BZ1/fz2Z+z13jfe59z1r3X86z1bNkmIiK6a62xTiAiIsZWCkFERMelEEREdFwKQUREx6UQRER0XApBRETHpRBE1CTpMEk/KM83k3SvpAmrsJ+PSTpmzWc4MOaOkn5Xcv7rNmPHk18KwTgk6XpJrx5m/s6SHi0Hg97j9L7lW0n6oaTbJN0t6XJJB63KwW5I3AMkLZD0oKTvPsFt3yLpPEn3S/rlKOv2v77lkq6R9I7VyX0ktm+0vb7tR2rktHTItkfYflcTeQ1wOPC1kvOPV3dnkr4r6dNrIK94Ekgh6J6by8Gg99gDQNKWwHxgCfAi2xsAfwPMAaasbkzg08C3V2HbO4B/Bf5v3Vi21weeDnwE+Jak2UNXkjRxFXJ5KtscWLgqG3bwveqcFILo+SRwnu2DbN8CYPsa22+1fdfQlSX9paQr+qbPlnRh3/Sve00Qtk8rn0JvH2Y/z5D0E0nLJN1Znk/vLbf9c9snUxWT2lz5MXAnMFvSFpIs6Z2SbgR+UeLvUM447pJ0maSd+3KbKenccnZxNjC1b1lvfxPL9EaSviPp5vI6fixpMnAGsEnfGdgm/U1MZds9JS0sOfxS0tZ9y66X9KFydna3pP+QtE5ZNrW8X3dJuqO85yv9T0u6DngucHrJ4Wklj7llu0WS3t23/mGSTpH0A0n3APsNeq/73ot3SFpSXv97Jb2k5H2XpK/1rb+lpF9Iur2cfR4vacO+5dtKuqS87z8sr/nTfctfL+nSst/zJL14UH4xuhSC6Hk1cMoTWP98YFY5GE0CXkx1wJsiaV2qM4lf19jPWsB3qD6xbgY8AHxt4BY1SFpL0huADYEr+hbtBGwNvEbSpsBPqc5WNgI+BJwqaVpZ9wTgIqoC8Cng7QNCHgesB7wAeCZwpO37gN15/FnY4wqapK2AE4EPANOAeVQH7LX7VnsLsBswk+p93q/MPxhYWrZ7FvAxYKUxY2xvCdwI7FFyeBA4qWy7CfBm4AhJr+rbbC+qv4cNgeMHvO5+2wOzgL+lOov7Z6q/qxcAb5G0U+9lA58tsbcGZgCHlfdjbeBHwHepficnAm/oBZC0DdWZ5XuAjYFvAnMlPa1mjjGMFILu2aR8kuo93lLmbwzcUncnth8ALgReCfwFcBnwX8COwA7A72yvdAYwzH5ut32q7fttLwc+Q3WwXlWbSLoLuA04FHib7Wv6lh9m+76S/77APNvzbD9q+2xgAfBaSZsBLwH+xfaDtn8FnM4wJD2H6oD/Xtt32n7I9rk18/1b4Ke2z7b9EPBFYF3gZX3r/Jvtm23fUXL48zL/IeA5wOYl5q9dY/AwSTOofk8fsf3fti8FjgH+rm+139j+cXlfHqj5Wj5V9vcz4D7gRNu32r6J6kPBNgC2F5XX+6DtZcCXeex3vgMwsbzmh2yfBlzQF2N/4Ju259t+xPb3gAfLdrGK0vbXPTfbnj7M/NupDirDkvTvVAdOgCNsHwGcC+xM9cnyXKpmmJ2o/jFrHQglrQccSfWJ9xll9hRJE0briB3BSK+vZ0nf882Bv5G0R9+8ScA5VJ9W7yyf6ntuoPr0OtQM4A7bd65CvpuU/QJg+1FJS4BN+9b5Q9/z+8s2AF+g+iT9M0kAR9uu05eyScl3ed+8G6jO4nqW8MT9se/5A8NMrw8g6VnAV4BXUPU/rUX1t9P
L7aYhBW3o7+ztkt7XN29tHntPYhXkjCB6fg68aaSFtt/b17xxRJndKwSvLM/PpSoEO1GzEFA1bzwf2N7208u+oGo+aMLQA8xxtjfse0wuB9NbgGeUdv6ezUbY5xJgo/527hHiDedmqoMbAKqO6DOAm0Z9IfZy2wfbfi6wJ3CQpF1G267E3EhS/0UAmw2J2eSwxEeU/b+o/M735bHf9y3ApuV96OkvvkuAzwz5na1n+8QG8x33UgjGr0mS1ul7jHb2dyjwMklfkPRsAEnPKx2Gwx3gAM6jOohvB1xgeyHVQW174Fe9lSRNLB2cE4AJQ/KZQvVp8S5JG5U86Nt2Qtl2IrBW2XbSE3gfBvkBsIek1/TiqLrcc7rtG6iaiT4paW1JLwf2GG4npXP9DODrqjq/J0nqFbQ/AhtL2mCEHE4GXidpl/K6DqY6ozpvtORLp+nzykHzbuAR4NHRtrO9pOz/s+U1vxh4Z3k/2jAFuBe4u/TTfLhv2W+oXscB5e9mL6q/r55vAe+VtL0qkyW9bkhRiycohWD8mkd1gO09Dhu0su3rgJcCWwALJd0NnEp1MFw+wjb3ARcDC22vKLN/A9xg+9a+VT9ecjiE6tPfA2UeVJ2K61K16Z8PnDkkzNvK+t+gakp4gOpgsNrKAXEvqk7WZVSfNj/MY/8Xb6UqandQFajvD9jd26ja7K8GbqXq/MX21VQdnotLn8zjmjBK/8W+wFep3oM9qDp1VzC6WVRncvdSve9ft31Oje0A9qH6Xd9M1Tl7qO2f19x2dX0S2JaqeP0UOK23oLzuN1IVpruo3pufUBVHbC8A3k11QcGdwCJGuaopRqd8MU1EPJlJmg/8u+3vjHUu41XOCCLiSUXSTpKeXZqG3k51yezQM8VYg3LVUEQ82Tyfqu9kMrAYeHPvJsdoRpqGIiI6Lk1DEREd95RrGpo6daq32GKLsU4jIuIp5aKLLrrN9rThlj3lCsEWW2zBggULxjqNiIinFEk3jLQsTUMRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHfeUu7M4nrgjz7628Rgf3HWrxmNERDNyRhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxjRYCSbtJukbSIkmHDLN8M0nnSLpE0uWSXttkPhERsbLGCoGkCcBRwO7AbGAfSbOHrPZx4GTb2wB7A19vKp+IiBhek2cE2wGLbC+2vQI4CdhryDoGnl6ebwDc3GA+ERExjCYLwabAkr7ppWVev8OAfSUtBeYB7xtuR5L2l7RA0oJly5Y1kWtERGeNdWfxPsB3bU8HXgscJ2mlnGwfbXuO7TnTpk1rPcmIiPGsyUJwEzCjb3p6mdfvncDJALZ/A6wDTG0wp4iIGKLJQnAhMEvSTElrU3UGzx2yzo3ALgCStqYqBGn7iYhoUWOFwPbDwAHAWcBVVFcHLZR0uKQ9y2oHA++WdBlwIrCfbTeVU0RErGxikzu3PY+qE7h/3if6nl8J7NhkDhERMdhYdxZHRMQYSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI4btRBIelEbiURExNioc0bwdUkXSPoHSRs0nlFERLRq1EJg+xXA/wZmABdJOkHSro1nFhERrajVR2D7d8DHgY8AOwH/JulqSW9sMrmIiGhenT6CF0s6ErgKeBWwh+2ty/MjG84vIiIaNrHGOl8FjgE+ZvuB3kzbN0v6eGOZRUREK+o0Df3I9nH9RUDSgQC2j2sss4iIaEWdQvB3w8zbbw3nERERY2TEpiF
J+wBvBWZKmtu3aApwR9OJRUREOwb1EZwH3AJMBb7UN385cHmTSUVERHtGLAS2bwBuAF7aXjoREdG2QU1D/9/2yyUtB9y/CLDtpzeeXURENG7QGcHLy88p7aUTERFtG3RGsNGgDW2nwzgiYhwY1Fl8EVWTkIZZZuC5jWQUERGtGtQ0NLPNRCIiYmyMeEOZpD8rP7cd7lFn55J2k3SNpEWSDhlhnbdIulLSQkknrNrLiIiIVTWoaeggYH8efw9Bj6kGnRuRpAnAUcCuwFLgQklzbV/Zt84s4KPAjrbvlPTMJ5h/RESspkFNQ/uXn3+5ivveDlhkezGApJOAvYAr+9Z5N3CU7TtLrFtXMVZERKyiOsNQryPpIEmnSTpV0gckrVNj35sCS/qml5Z5/bYCtpL0X5LOl7TbCDnsL2mBpAXLli2rEToiIuqqM+jc94EXUA1H/bXyfE2NOjoRmAXsDOwDfEvShkNXsn207Tm250ybNm0NhY6ICKj3fQQvtD27b/ocSVeOuPZjbqL6esue6WVev6XAfNsPAb+XdC1VYbiwxv4jImINqHNGcLGkHXoTkrYHFtTY7kJglqSZktYG9gbmDlnnx1RnA0iaStVUtLjGviMiYg0ZdGfxFVRXB00CzpN0Y5neHLh6tB3bfljSAcBZwATg27YXSjocWGB7bln2V+UM4xHgw7ZvX90XFRER9Q1qGnr96u7c9jxg3pB5n+h7bqrLVA9a3VgREbFqRhuG+k/KNf51rhaKiIinkDqXj+4p6XfA74FzgeuBMxrOKyIiWlKns/hTwA7AtWX8oV2A8xvNKiIiWlOnEDxUOnDXkrSW7XOAOQ3nFRERLalzH8FdktYHfg0cL+lW4L5m04qIiLbUOSPYC3gA+ABwJnAdsEeTSUVERHtGPSOwfZ+kZ1MNIncHcFau9Y+IGD/qXDX0LuAC4I3Am4HzJf1904lFREQ76vQRfBjYpncWIGlj4Dzg200mFhER7ajTR3A7sLxvenmZFxER48CgsYZ6wz4sAuZL+k+qsYb2Ai5vIbeIiGjBoKahKeXndeXR85/NpRMREW0bNNbQJ/uny70E2L636aQiIqI9da4aeqGkS4CFwEJJF0l6QfOpRUREG+p0Fh8NHGR7c9ubAwcD32o2rYiIaEudQjC5jC8EgO1fApMbyygiIlpV5z6CxZL+hce+sH5f8nWSERHjRp0zgr8HpgGnAacCU8u8iIgYBwaeEUiaAPyz7fe3lE9ERLRs4BmB7UeAl7eUS0REjIE6fQSXSJoL/JC+7yGwfVpjWUVERGvqFIJ1qMYWelXfPFP1GURExFNcrdFHbd/WeCYRETEmRuwjkLSHpGXA5ZKWSnpZi3lFRERLBnUWfwZ4he1NgDcBn20npYiIaNOgQvCw7asBbM/nsdFIIyJiHBnUR/DMvu8kWGna9pebSysiItoyqBB8i8efBQydjoiIcaD29xFERMT4VGesoYiIGMdSCCIiOi6FICKi40bsIxhyxdBKctVQRMT4MOiqod4VQs8HXgLMLdN7ABc0mVRERLRn1KuGJP0K2Nb28jJ9GPDTVrKLiIjG1ekjeBawom96RZkXERHjQJ3RR78PXCDpR2X6r4HvNZdSRES0adRCYPszks4AXlFmvcP2Jc2mFRERbal7+eh6wD22vwIslTSzzkaSdpN0jaRFkg4ZsN6bJFnSnJr5RETEGjJqIZB0KPAR4KNl1iTgBzW2mwAcBewOzAb2kTR7mPWmAAcC8+unHRERa0qdM4I3AHtSvq/Y9s3UG3xuO2CR7cW2VwAnAXsNs96ngM8B/10r44iIWKPqFIIVtk31PcVImlxz35sCS/qml5Z5fyJpW2CG7YGXo0raX9ICSQuWLVtWM3xERNRRpxCcLOmbwIaS3g38HDhmdQNLWgv4MnDwaOvaPtr2HNtzpk2btrqhIyKiT52rhr4oaVfgHqq7jD9h++wa+74JmNE3Pb3M65kCvBD4pSSAZwNzJe1pe0HN/CM
iYjWNWggkfc72R4Czh5k3yIXArHKF0U3A3sBbewtt3w1M7dvnL4EPpQhERLSrTtPQrsPM2320jWw/DBwAnAVcBZxse6GkwyXt+cTSjIiIpgwaffT/AP8AbCnp8r5FU4Dz6uzc9jxg3pB5nxhh3Z3r7DMiItasQU1DJwBnAJ8F+m8GW277jkazioiI1ozYNGT7btvXA18B7rB9g+0bgIclbd9WghER0aw6fQTfAO7tm763zIuIiHGgTiFQuaEMANuPUm/U0oiIeAqoUwgWS3q/pEnlcSCwuOnEIiKiHXUKwXuBl1HdC7AU2B7Yv8mkIiKiPXXuLL6V6mawiIgYhwbdR/BPtj8v6auUAef62X5/o5lFREQrBp0RXFV+ZsiHiIhxbMRCYPv08jPfTxwRMY4Naho6nWGahHpsZ7ygiIhxYFDT0BfLzzdSDRHd+3rKfYA/NplURES0Z1DT0LkAkr5ku/9L5U+XlH6DiIhxos59BJMlPbc3Ub5foO7XVUZExJNcnaEiPkj1LWKLAQGbA+9pNKuIiGhNnRvKzpQ0C/izMutq2w82m1ZERLRl1KYhSesBHwYOsH0ZsJmk1zeeWUREtKJOH8F3gBXAS8v0TcCnG8soIiJaVacQbGn788BDALbvp+oriIiIcaBOIVghaV3KzWWStgTSRxARMU7UuWroUOBMYIak44Edgf2aTCoiItozsBBIEnA11d3FO1A1CR1o+7YWcouIiBYMLAS2LWme7RcBP20pp4iIaFGdPoKLJb2k8UwiImJM1Okj2B7YV9L1wH1UzUO2/eImE4uIiHbUKQSvaTyLiIgYM4O+j2Adqi+ufx5wBXCs7YfbSiwiItoxqI/ge8AcqiKwO/ClVjKKiIhWDWoaml2uFkLSscAF7aQUERFtGnRG8FDvSZqEIiLGr0FnBP9L0j3luYB1y3TvqqGnN55dREQ0btBXVU5oM5GIiBgbdW4oi4iIcSyFICKi41IIIiI6LoUgIqLjUggiIjqu0UIgaTdJ10haJOmQYZYfJOlKSZdL+n+SNm8yn4iIWFljhUDSBOAoquEpZgP7SJo9ZLVLgDllJNNTgM83lU9ERAyvyTOC7YBFthfbXgGcBOzVv4Ltc2zfXybPB6Y3mE9ERAyjyUKwKbCkb3ppmTeSdwJnDLdA0v6SFkhasGzZsjWYYkREPCk6iyXtSzXS6ReGW277aNtzbM+ZNm1au8lFRIxzdb6YZlXdBMzom55e5j2OpFcD/wzsZPvBBvOJiIhhNHlGcCEwS9JMSWsDewNz+1eQtA3wTWBP27c2mEtERIygsUJQhq4+ADgLuAo42fZCSYdL2rOs9gVgfeCHki6VNHeE3UVEREOabBrC9jxg3pB5n+h7/uom40dExOieFJ3FERExdlIIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi41IIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi41IIIiI6LoUgIqLjUggiIjqu0S+miYho05FnX9t4jA/uulXjMdqWM4KIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjmu0EEjaTdI1khZJOmSY5U+T9B9l+XxJWzSZT0RErKyxQiBpAnAUsDswG9hH0uwhq70TuNP284Ajgc81lU9ERAxvYoP73g5YZHsxgKSTgL2AK/vW2Qs4rDw/BfiaJNl2Ewkdefa1Tez2cT6461aNx4iIWJOaLASbAkv6ppcC24+0ju2HJd0NbAzc1r+SpP2B/cvkvZKuaSTj4U0dms8gB41h7DUsrzuxE3sYa/Bvve3XvflIC5osBGuM7aOBo8citqQFtuckdmIndmKPl9hDNdlZfBMwo296epk37DqSJgIbALc3mFNERAzRZCG
4EJglaaaktYG9gblD1pkLvL08fzPwi6b6ByIiYniNNQ2VNv8DgLOACcC3bS+UdDiwwPZc4FjgOEmLgDuoisWTzZg0SSV2Yid2YrdF+QAeEdFtubM4IqLjUggiIjouhWAEow2P0XDsb0u6VdJvW447Q9I5kq6UtFDSgS3GXkfSBZIuK7E/2VbsvhwmSLpE0k/GIPb1kq6QdKmkBS3H3lDSKZKulnSVpJe2FPf55fX2HvdI+kAbsUv8D5a/td9KOlHSOi3GPrDEXdjmax6R7TyGPKg6t68DngusDVwGzG4x/iuBbYHftvy6nwNsW55PAa5t63UDAtYvzycB84EdWn79BwEnAD9pM26JfT0wte24Jfb3gHeV52sDG45BDhOAPwCbtxRvU+D3wLpl+mRgv5ZivxD4LbAe1QU7PweeNxa/+94jZwTD+9PwGLZXAL3hMVph+1dUV1G1yvYtti8uz5cDV1H9w7QR27bvLZOTyqO1KxkkTQdeBxzTVswnA0kbUH3wOBbA9grbd41BKrsA19m+ocWYE4F1yz1M6wE3txR3a2C+7fttPwycC7yxpdjDSiEY3nDDY7RyQHyyKCPBbkP1ybytmBMkXQrcCpxtu7XYwL8C/wQ82mLMfgZ+JumiMqRKW2YCy4DvlGaxYyRNbjF+z97AiW0Fs30T8EXgRuAW4G7bP2sp/G+BV0jaWNJ6wGt5/M23rUshiJVIWh84FfiA7Xvaimv7Edt/TnUX+naSXthGXEmvB261fVEb8UbwctvbUo3W+4+SXtlS3IlUzZDfsL0NcB/Qdp/Y2sCewA9bjPkMqrP8mcAmwGRJ+7YR2/ZVVCMt/ww4E7gUeKSN2CNJIRheneExxiVJk6iKwPG2TxuLHErTxDnAbi2F3BHYU9L1VM2Ar5L0g5ZiA3/6hIrtW4EfUTVPtmEpsLTv7OsUqsLQpt2Bi23/scWYrwZ+b3uZ7YeA04CXtRXc9rG2/8L2K4E7qfrjxkwKwfDqDI8x7kgSVVvxVba/3HLsaZI2LM/XBXYFrm4jtu2P2p5uewuq3/UvbLfy6RBA0mRJU3rPgb+iaj5onO0/AEskPb/M2oXHDxXfhn1osVmouBHYQdJ65e9+F6o+sVZIemb5uRlV/8AJbcUezlNi9NG2eYThMdqKL+lEYGdgqqSlwKG2j20h9I7A24ArSls9wMdsz2sh9nOA75UvNFoLONl265dxjpFnAT+qjkdMBE6wfWaL8d8HHF8+9CwG3tFW4FL4dgXe01ZMANvzJZ0CXAw8DFxCu0M+nCppY+Ah4B/HqIP+TzLEREREx6VpKCKi41IIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi4/4HEHMv4f97kiwAAAAASUVORK5CYII=\n", "text/plain": [ "<Figure size 432x288 with 1 Axes>" ] @@ -421,18 +422,9 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 13, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/brevitas_cnv_lfc/training_scripts/models/LFC.py:73: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. 
In any other case, this might cause the trace to be incorrect.\n", - " x = 2.0 * x - torch.tensor([1.0])\n" - ] - } - ], + "outputs": [], "source": [ "import brevitas.onnx as bo\n", "export_onnx_path = \"/tmp/LFCW1A1.onnx\"\n", @@ -449,13 +441,15 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 14, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", "Serving '/tmp/LFCW1A1.onnx' at http://0.0.0.0:8081\n" ] } @@ -467,7 +461,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 15, "metadata": {}, "outputs": [ { @@ -506,7 +500,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 16, "metadata": {}, "outputs": [ { @@ -518,7 +512,7 @@ "op_type: \"MatMul\"" ] }, - "execution_count": 22, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -538,7 +532,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 17, "metadata": {}, "outputs": [ { @@ -553,7 +547,7 @@ " [-1., 1., 1., ..., -1., -1., 1.]], dtype=float32)" ] }, - "execution_count": 23, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } @@ -571,7 +565,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 18, "metadata": {}, "outputs": [ { @@ -580,7 +574,7 @@ "<DataType.BIPOLAR: 8>" ] }, - "execution_count": 24, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -591,7 +585,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 19, "metadata": {}, "outputs": [ { @@ -600,7 +594,7 @@ "[784, 1024]" ] }, - "execution_count": 25, + "execution_count": 19, "metadata": {}, "output_type": "execute_result" } @@ -618,7 +612,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 20, "metadata": {}, "outputs": [ { @@ -643,7 +637,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 21, "metadata": {}, 
"outputs": [ { @@ -673,7 +667,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 22, "metadata": {}, "outputs": [ { @@ -684,7 +678,7 @@ " dtype=float32)" ] }, - "execution_count": 18, + "execution_count": 22, "metadata": {}, "output_type": "execute_result" } @@ -700,7 +694,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 23, "metadata": {}, "outputs": [ { @@ -709,7 +703,7 @@ "True" ] }, - "execution_count": 19, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } diff --git a/notebooks/end2end_example/finn-design-flow-example.svg b/notebooks/end2end_example/finn-design-flow-example.svg new file mode 100755 index 0000000000000000000000000000000000000000..7d391fd9fe0b05634f422fab10aff7064b6f6fb8 --- /dev/null +++ b/notebooks/end2end_example/finn-design-flow-example.svg @@ -0,0 +1 @@ +<svg version="1.1" viewBox="0.0 0.0 1108.5013123359581 921.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg"><clipPath id="p.0"><path d="m0 0l1108.5013 0l0 921.0l-1108.5013 0l0 -921.0z" clip-rule="nonzero"/></clipPath><g clip-path="url(#p.0)"><path fill="#000000" fill-opacity="0.0" d="m0 0l1108.5013 0l0 921.0l-1108.5013 0z" fill-rule="evenodd"/><path fill="#fff2cc" d="m254.88452 348.7533l194.23358 0l0 -208.67456l237.55646 0l0 -123.472435l-423.84515 0l-7.9448853 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m254.88452 348.7533l194.23358 0l0 -208.67456l237.55646 0l0 -123.472435l-423.84515 0l-7.9448853 0z" fill-rule="evenodd"/><path fill="#f4cccc" d="m486.98688 147.19948l396.8504 0l0 403.81104l-396.8504 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m486.98688 147.19948l396.8504 0l0 403.81104l-396.8504 0z" fill-rule="evenodd"/><path fill="#d9d9d9" d="m38.39895 360.0l409.79526 
0l0 493.57483l-409.79526 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m38.39895 360.0l409.79526 0l0 493.57483l-409.79526 0z" fill-rule="evenodd"/><path fill="#ead1dc" d="m42.062992 21.755905l185.6693 0l0 327.81104l-185.6693 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m42.062992 21.755905l185.6693 0l0 327.81104l-185.6693 0z" fill-rule="evenodd"/><path fill="#d9ead3" d="m486.98688 562.54333l0 0c0 -0.0028686523 0.0022888184 -0.005126953 0.0050964355 -0.005126953l613.0028 0.005126953c0.0013427734 0 0.0026855469 4.8828125E-4 0.0036621094 0.0014648438c8.544922E-4 9.765625E-4 0.0014648438 0.0022583008 0.0014648438 0.0036010742l-0.005126953 351.61694c0 0.0028076172 -0.002319336 0.005126953 -0.005126953 0.005126953l-613.00275 -0.005126953l0 0c-0.0028076172 0 -0.0050964355 -0.0022583008 -0.0050964355 -0.005126953z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m486.98688 562.54333l0 0c0 -0.0028686523 0.0022888184 -0.005126953 0.0050964355 -0.005126953l613.0028 0.005126953c0.0013427734 0 0.0026855469 4.8828125E-4 0.0036621094 0.0014648438c8.544922E-4 9.765625E-4 0.0014648438 0.0022583008 0.0014648438 0.0036010742l-0.005126953 351.61694c0 0.0028076172 -0.002319336 0.005126953 -0.005126953 0.005126953l-613.00275 -0.005126953l0 0c-0.0028076172 0 -0.0050964355 -0.0022583008 -0.0050964355 -0.005126953z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m50.929134 58.25197l0 0c0 -6.2505302 36.66212 -11.317585 81.88713 -11.317585c45.22502 0 81.887146 5.0670547 81.887146 11.317585l0 45.27034c0 6.250534 -36.662125 11.317589 -81.887146 11.317589c-45.225014 0 -81.88713 -5.0670547 -81.88713 -11.317589z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m214.70341 58.25197l0 0c0 6.2505264 -36.662125 11.317589 -81.887146 11.317589c-45.225014 0 -81.88713 -5.0670624 -81.88713 
-11.317589" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m50.929134 58.25197l0 0c0 -6.2505302 36.66212 -11.317585 81.88713 -11.317585c45.22502 0 81.887146 5.0670547 81.887146 11.317585l0 45.27034c0 6.250534 -36.662125 11.317589 -81.887146 11.317589c-45.225014 0 -81.88713 -5.0670547 -81.88713 -11.317589z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m214.70341 58.25197l0 0c0 6.2505264 -36.662125 11.317589 -81.887146 11.317589c-45.225014 0 -81.88713 -5.0670624 -81.88713 -11.317589" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m50.929134 58.25197l0 0c0 -6.2505302 36.66212 -11.317585 81.88713 -11.317585c45.22502 0 81.887146 5.0670547 81.887146 11.317585l0 45.27034c0 6.250534 -36.662125 11.317589 -81.887146 11.317589c-45.225014 0 -81.88713 -5.0670547 -81.88713 -11.317589z" fill-rule="evenodd"/><path fill="#000000" d="m87.6296 83.34593l0 -8.421875l-3.140625 0l0 -1.125l7.5625 0l0 1.125l-3.15625 0l0 8.421875l-1.265625 0zm5.0528107 0l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.96962 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 
1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm3.0062256 -4.71875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.006226 4.125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 
0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm10.474846 3.453125l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218903 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.4217377 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.9064026 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 
0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696198 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m76.66056 99.34593l0 -9.546875l1.25 0l0 9.546875l-1.25 0zm3.3279877 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm11.271713 0l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm7.4210052 7.65625l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 
0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm9.2890625 -2.65625l0 -8.421875l-3.140625 0l0 -1.125l7.5625 0l0 1.125l-3.15625 0l0 8.421875l-1.265625 0zm3.647873 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.969627 -2.53125l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.9218826 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.1250076 0 1.8437576 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.8906326 0 -1.4531326 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875076 0 1.1406326 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm2.1484375 
2.53125l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm6.5374756 0.15625l2.765625 -9.859375l0.9375 0l-2.765625 9.859375l-0.9375 0zm4.6717377 -0.15625l0 -9.546875l3.59375 0q1.09375 0 1.75 0.296875q0.65625 0.28125 1.03125 0.890625q0.375 0.609375 0.375 1.265625q0 0.609375 -0.34375 1.15625q-0.328125 0.53125 -0.984375 0.859375q0.859375 0.25 1.328125 0.875q0.46875 0.609375 0.46875 1.4375q0 0.671875 -0.296875 1.25q-0.28125 0.578125 -0.703125 0.890625q-0.40625 0.3125 -1.03125 0.46875q-0.625 0.15625 -1.546875 0.15625l-3.640625 0zm1.265625 -5.53125l2.0625 0q0.84375 0 1.203125 -0.109375q0.484375 -0.140625 0.71875 -0.46875q0.25 -0.34375 0.25 -0.84375q0 -0.46875 -0.234375 -0.828125q-0.21875 -0.359375 -0.640625 -0.5q-0.421875 -0.140625 -1.453125 -0.140625l-1.90625 0l0 2.890625zm0 4.40625l2.375 0q0.609375 0 0.859375 -0.046875q0.4375 -0.078125 0.734375 -0.25q0.296875 -0.1875 0.484375 -0.53125q0.1875 -0.359375 0.1875 -0.8125q0 -0.53125 -0.28125 -0.921875q-0.265625 -0.40625 -0.75 -0.5625q-0.484375 -0.15625 -1.40625 -0.15625l-2.203125 0l0 3.28125zm7.5147552 1.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.18837 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 
0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm8.443726 4.125l-2.625 -6.90625l1.234375 0l1.484375 4.140625q0.234375 0.65625 0.4375 1.390625q0.15625 -0.546875 0.4375 -1.3125l1.53125 -4.21875l1.21875 0l-2.625 6.90625l-1.09375 0zm4.7578125 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.507965 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.6561127 0.1875q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 
0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.5218506 1.40625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m50.92651 170.59602l0 0c0 -6.250656 5.0671577 -11.31781 11.31781 -11.31781l141.1439 0c3.0016632 0 5.8804016 1.1924133 8.002899 3.314911c2.1224976 2.1224976 3.314911 5.001236 3.314911 8.002899l0 45.269897c0 6.250656 -5.067154 11.31781 -11.31781 11.31781l-141.1439 0c-6.2506523 0 -11.31781 -5.067154 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m50.92651 170.59602l0 0c0 -6.250656 5.0671577 -11.31781 11.31781 -11.31781l141.1439 0c3.0016632 0 5.8804016 1.1924133 8.002899 3.314911c2.1224976 2.1224976 3.314911 5.001236 3.314911 8.002899l0 45.269897c0 6.250656 -5.067154 11.31781 -11.31781 
11.31781l-141.1439 0c-6.2506523 0 -11.31781 -5.067154 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m109.71239 190.03098l0 -9.546875l3.59375 0q1.09375 0 1.75 0.296875q0.65625 0.28125 1.03125 0.890625q0.375 0.609375 0.375 1.265625q0 0.609375 -0.34375 1.15625q-0.328125 0.53125 -0.984375 0.859375q0.859375 0.25 1.328125 0.875q0.46875 0.609375 0.46875 1.4375q0 0.671875 -0.296875 1.25q-0.28125 0.578125 -0.703125 0.890625q-0.40625 0.3125 -1.03125 0.46875q-0.625 0.15625 -1.546875 0.15625l-3.640625 0zm1.265625 -5.53125l2.0625 0q0.84375 0 1.203125 -0.109375q0.484375 -0.140625 0.71875 -0.46875q0.25 -0.34375 0.25 -0.84375q0 -0.46875 -0.234375 -0.828125q-0.21875 -0.359375 -0.640625 -0.5q-0.421875 -0.140625 -1.453125 -0.140625l-1.90625 0l0 2.890625zm0 4.40625l2.375 0q0.609375 0 0.859375 -0.046875q0.4375 -0.078125 0.734375 -0.25q0.296875 -0.1875 0.484375 -0.53125q0.1875 -0.359375 0.1875 -0.8125q0 -0.53125 -0.28125 -0.921875q-0.265625 -0.40625 -0.75 -0.5625q-0.484375 -0.15625 -1.40625 -0.15625l-2.203125 0l0 3.28125zm7.5147552 1.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.18837 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm8.443726 4.125l-2.625 
-6.90625l1.234375 0l1.484375 4.140625q0.234375 0.65625 0.4375 1.390625q0.15625 -0.546875 0.4375 -1.3125l1.53125 -4.21875l1.21875 0l-2.625 6.90625l-1.09375 0zm4.7578125 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.507965 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.6561127 0.1875q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.5218506 1.40625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 
-0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#000000" d="m76.517044 206.03098l0 -9.546875l6.4375 0l0 1.125l-5.171875 0l0 2.96875l4.46875 0l0 1.125l-4.46875 0l0 4.328125l-1.265625 0zm8.297607 0l0 -9.546875l1.25 0l0 9.546875l-1.25 0zm3.4686127 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm9.625153 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm9.031403 -2.859375l0 -1.1875l3.59375 0l0 1.1875l-3.59375 0zm4.6571198 -1.78125q0 -2.375 1.28125 -3.71875q1.28125 -1.34375 3.296875 -1.34375q1.3125 0 2.375 0.625q1.0625 0.625 1.609375 1.765625q0.5625 1.125 0.5625 2.5625q0 1.4375 -0.59375 2.59375q-0.578125 1.140625 -1.65625 1.734375q-1.0625 0.578125 -2.3125 0.578125q-1.34375 0 -2.40625 -0.640625q-1.0625 -0.65625 -1.609375 -1.78125q-0.546875 -1.125 -0.546875 -2.375zm1.3125 0.015625q0 1.71875 0.921875 2.71875q0.921875 0.984375 2.328125 0.984375q1.421875 0 2.34375 -1.0q0.921875 -1.0 0.921875 -2.84375q0 -1.15625 -0.40625 -2.03125q-0.390625 -0.875 -1.15625 
-1.34375q-0.75 -0.484375 -1.6875 -0.484375q-1.34375 0 -2.3125 0.921875q-0.953125 0.921875 -0.953125 3.078125zm9.42955 4.625l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm9.625153 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm8.672028 0l3.6875 -4.96875l-3.25 -4.578125l1.5 0l1.734375 2.453125q0.53125 0.765625 0.765625 1.171875q0.3125 -0.515625 0.75 -1.09375l1.921875 -2.53125l1.375 0l-3.359375 4.5l3.625 5.046875l-1.5625 0l-2.40625 -3.40625q-0.203125 -0.296875 -0.421875 -0.640625q-0.3125 0.53125 -0.453125 0.71875l-2.390625 3.328125l-1.515625 0zm18.139618 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm5.7406006 4.125l2.53125 -3.59375l-2.34375 -3.3125l1.46875 0l1.0625 1.609375q0.296875 0.46875 0.484375 0.78125q0.28125 -0.4375 0.515625 -0.765625l1.171875 -1.625l1.40625 0l-2.390625 3.25l2.5625 3.65625l-1.4375 0l-1.421875 -2.140625l-0.375 -0.59375l-1.8125 2.734375l-1.421875 0zm7.4453125 2.65625l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 
1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm5.9124756 -0.03125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312103 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm7.01651 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m50.929134 282.93964l0 0c0 -6.250519 36.66212 -11.317596 81.88713 -11.317596c45.22502 0 81.887146 5.0670776 81.887146 11.317596l0 45.270325c0 6.2505493 -36.662125 11.317596 -81.887146 11.317596c-45.225014 0 -81.88713 -5.067047 -81.88713 -11.317596z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m214.70341 282.93964l0 0c0 6.250519 -36.662125 11.317596 -81.887146 
11.317596c-45.225014 0 -81.88713 -5.0670776 -81.88713 -11.317596" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m50.929134 282.93964l0 0c0 -6.250519 36.66212 -11.317596 81.88713 -11.317596c45.22502 0 81.887146 5.0670776 81.887146 11.317596l0 45.270325c0 6.2505493 -36.662125 11.317596 -81.887146 11.317596c-45.225014 0 -81.88713 -5.067047 -81.88713 -11.317596z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m214.70341 282.93964l0 0c0 6.250519 -36.662125 11.317596 -81.887146 11.317596c-45.225014 0 -81.88713 -5.0670776 -81.88713 -11.317596" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m50.929134 282.93964l0 0c0 -6.250519 36.66212 -11.317596 81.88713 -11.317596c45.22502 0 81.887146 5.0670776 81.887146 11.317596l0 45.270325c0 6.2505493 -36.662125 11.317596 -81.887146 11.317596c-45.225014 0 -81.88713 -5.067047 -81.88713 -11.317596z" fill-rule="evenodd"/><path fill="#000000" d="m71.6068 308.0336l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218903 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 
-1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.4217377 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.9064026 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696198 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.913925 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 
0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.1247253 0l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm7.4281006 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.726715 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875zm6.6468506 3.578125l0 -9.546875l1.171875 0l0 
3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm6.9593506 -2.859375l0 -1.1875l3.59375 0l0 1.1875l-3.59375 0zm4.8758698 2.859375l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.71109 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm8.443741 4.125l-2.6250153 -6.90625l1.2343903 0l1.484375 4.140625q0.234375 0.65625 0.4375 1.390625q0.15625 -0.546875 0.4375 -1.3125l1.53125 -4.21875l1.21875 0l-2.625 6.90625l-1.09375 0zm9.4765625 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" 
d="m94.57558 319.39297q0 -2.375 1.28125 -3.71875q1.28125 -1.34375 3.296875 -1.34375q1.3125 0 2.375 0.625q1.0625 0.625 1.609375 1.765625q0.5625 1.125 0.5625 2.5625q0 1.4375 -0.59375 2.59375q-0.578125 1.140625 -1.65625 1.734375q-1.0625 0.578125 -2.3125 0.578125q-1.34375 0 -2.40625 -0.640625q-1.0625 -0.65625 -1.609375 -1.78125q-0.546875 -1.125 -0.546875 -2.375zm1.3125 0.015625q0 1.71875 0.921875 2.71875q0.921875 0.984375 2.328125 0.984375q1.421875 0 2.34375 -1.0q0.921875 -1.0 0.921875 -2.84375q0 -1.15625 -0.40625 -2.03125q-0.390625 -0.875 -1.15625 -1.34375q-0.75 -0.484375 -1.6875 -0.484375q-1.34375 0 -2.3125 0.921875q-0.953125 0.921875 -0.953125 3.078125zm9.42955 4.625l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm9.625153 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm8.672028 0l3.6875 -4.96875l-3.25 -4.578125l1.5 0l1.734375 2.453125q0.53125 0.765625 0.765625 1.171875q0.3125 -0.515625 0.75 -1.09375l1.921875 -2.53125l1.375 0l-3.359375 4.5l3.625 5.046875l-1.5625 0l-2.40625 -3.40625q-0.203125 -0.296875 -0.421875 -0.640625q-0.3125 0.53125 -0.453125 0.71875l-2.390625 3.328125l-1.515625 0zm13.389618 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 
1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852448 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 
-0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m50.92651 381.34406l0 0c0 -6.250641 5.0671577 -11.31781 11.31781 -11.31781l141.1439 0c3.0016632 0 5.8804016 1.1924133 8.002899 3.314911c2.1224976 2.1224976 3.314911 5.0012207 3.314911 8.002899l0 45.269897c0 6.250641 -5.067154 11.31781 -11.31781 11.31781l-141.1439 0c-6.2506523 0 -11.31781 -5.067169 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m50.92651 381.34406l0 0c0 -6.250641 5.0671577 -11.31781 11.31781 -11.31781l141.1439 0c3.0016632 0 5.8804016 1.1924133 8.002899 3.314911c2.1224976 2.1224976 3.314911 5.0012207 3.314911 8.002899l0 45.269897c0 6.250641 -5.067154 11.31781 -11.31781 11.31781l-141.1439 0c-6.2506523 0 -11.31781 -5.067169 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m96.37043 397.7165l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 
-0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm11.733505 2.015625l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1248627 1.046875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.18837 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 
1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.037476 3.265625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.990593 3.46875l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 
0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm11.086807 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.99234 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.4281006 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.1937256 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 
-1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875z" fill-rule="nonzero"/><path fill="#000000" d="m88.37151 416.779l0 -8.421875l-3.140625 0l0 -1.125l7.5625 0l0 1.125l-3.15625 0l0 8.421875l-1.265625 0zm5.0528107 0l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.96962 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9906006 3.46875l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 
-0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm6.9437256 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.4140625 2.0625l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm2.9842377 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.3281174 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.1562424 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 
-0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.631218 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4539948 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.618057 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 
-0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1561127 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.507965 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm6.9437256 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 
1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m50.929134 484.6824l0 0c0 -6.250519 36.66212 -11.317566 81.88713 -11.317566c45.22502 0 81.887146 5.067047 81.887146 11.317566l0 45.270355c0 6.2505493 -36.662125 11.317566 -81.887146 11.317566c-45.225014 0 -81.88713 -5.0670166 -81.88713 -11.317566z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m214.70341 484.6824l0 0c0 6.2505493 -36.662125 11.317596 -81.887146 11.317596c-45.225014 0 -81.88713 -5.067047 -81.88713 -11.317596" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m50.929134 484.6824l0 0c0 -6.250519 36.66212 -11.317566 81.88713 -11.317566c45.22502 0 81.887146 5.067047 81.887146 11.317566l0 45.270355c0 6.2505493 -36.662125 11.317566 -81.887146 11.317566c-45.225014 0 -81.88713 -5.0670166 -81.88713 -11.317566z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m214.70341 484.6824l0 0c0 6.2505493 -36.662125 11.317596 -81.887146 11.317596c-45.225014 0 -81.88713 -5.067047 -81.88713 -11.317596" 
fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m50.929134 484.6824l0 0c0 -6.250519 36.66212 -11.317566 81.88713 -11.317566c45.22502 0 81.887146 5.067047 81.887146 11.317566l0 45.270355c0 6.2505493 -36.662125 11.317566 -81.887146 11.317566c-45.225014 0 -81.88713 -5.0670166 -81.88713 -11.317566z" fill-rule="evenodd"/><path fill="#000000" d="m65.25625 506.71387l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm11.733505 2.015625l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1248627 1.046875l0 
-6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.18837 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.037476 3.265625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 
-0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9906006 3.46875l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm11.086807 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.99234 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.006233 4.125l0 -0.875q-0.65625 1.03125 
-1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm10.334213 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 
0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.4217377 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.9064026 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696198 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.913925 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 
0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m64.43442 525.77637l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm7.4281006 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.726715 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875zm6.6468506 3.578125l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 
1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm6.9593506 -2.859375l0 -1.1875l3.59375 0l0 1.1875l-3.59375 0zm4.8758698 2.859375l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.71109 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm8.443726 4.125l-2.625 -6.90625l1.234375 0l1.484375 4.140625q0.234375 0.65625 0.4375 1.390625q0.15625 -0.546875 0.4375 -1.3125l1.53125 -4.21875l1.21875 0l-2.625 6.90625l-1.09375 0zm9.4765625 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm6.4453278 -4.640625q0 -2.375 1.28125 -3.71875q1.28125 -1.34375 3.296875 -1.34375q1.3125 0 2.375 0.625q1.0625 0.625 1.609375 1.765625q0.5625 1.125 0.5625 2.5625q0 1.4375 -0.59375 
2.59375q-0.578125 1.140625 -1.65625 1.734375q-1.0625 0.578125 -2.3125 0.578125q-1.34375 0 -2.40625 -0.640625q-1.0625 -0.65625 -1.609375 -1.78125q-0.546875 -1.125 -0.546875 -2.375zm1.3125 0.015625q0 1.71875 0.921875 2.71875q0.921875 0.984375 2.328125 0.984375q1.421875 0 2.34375 -1.0q0.921875 -1.0 0.921875 -2.84375q0 -1.15625 -0.40625 -2.03125q-0.390625 -0.875 -1.15625 -1.34375q-0.75 -0.484375 -1.6875 -0.484375q-1.34375 0 -2.3125 0.921875q-0.953125 0.921875 -0.953125 3.078125zm9.42955 4.625l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm9.625153 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm8.672028 0l3.6875 -4.96875l-3.25 -4.578125l1.5 0l1.734375 2.453125q0.53125 0.765625 0.765625 1.171875q0.3125 -0.515625 0.75 -1.09375l1.921875 -2.53125l1.375 0l-3.359375 4.5l3.625 5.046875l-1.5625 0l-2.40625 -3.40625q-0.203125 -0.296875 -0.421875 -0.640625q-0.3125 0.53125 -0.453125 0.71875l-2.390625 3.328125l-1.515625 0zm13.389618 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 
0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.23439026 -0.125 0.39064026 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.6250153 -6.921875l1.265625 0l1.4375153 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96876526 0.28125q-0.328125 0 -0.75 -0.15625zm11.445328 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852295 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 
-0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m50.92651 588.02124l0 0c0 -6.2506714 5.0671577 -11.31781 11.31781 -11.31781l141.1439 0c3.0016632 0 5.8804016 1.1923828 8.002899 3.3148804c2.1224976 2.1224976 3.314911 5.0012207 3.314911 8.00293l0 45.269897c0 6.2506104 -5.067154 11.31781 -11.31781 11.31781l-141.1439 0c-6.2506523 0 -11.31781 -5.0671997 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m50.92651 588.02124l0 0c0 -6.2506714 5.0671577 -11.31781 11.31781 -11.31781l141.1439 0c3.0016632 0 5.8804016 1.1923828 8.002899 3.3148804c2.1224976 2.1224976 3.314911 5.0012207 3.314911 8.00293l0 45.269897c0 6.2506104 -5.067154 11.31781 -11.31781 11.31781l-141.1439 0c-6.2506523 0 -11.31781 -5.0671997 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m73.23254 612.1124l1.265625 0.3125q-0.390625 1.5625 -1.421875 2.375q-1.03125 0.8125 -2.53125 0.8125q-1.53125 0 -2.5 -0.625q-0.96875 -0.625 -1.484375 -1.8125q-0.5 -1.1875 -0.5 -2.5625q0 -1.484375 0.5625 -2.59375q0.578125 -1.109375 1.625 -1.6875q1.0625 -0.578125 2.328125 -0.578125q1.421875 0 2.390625 0.734375q0.984375 0.71875 1.375 2.046875l-1.25 0.296875q-0.328125 -1.046875 -0.96875 
-1.515625q-0.625 -0.484375 -1.578125 -0.484375q-1.09375 0 -1.84375 0.53125q-0.734375 0.53125 -1.03125 1.421875q-0.296875 0.875 -0.296875 1.828125q0 1.21875 0.34375 2.125q0.359375 0.90625 1.109375 1.359375q0.75 0.4375 1.625 0.4375q1.0625 0 1.796875 -0.609375q0.734375 -0.609375 0.984375 -1.8125zm2.2345276 -0.109375q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm9.334351 0l-2.625 -6.90625l1.234375 0l1.484375 4.140625q0.234375 0.65625 0.4375 1.390625q0.15625 -0.546875 0.4375 -1.3125l1.53125 -4.21875l1.21875 0l-2.625 6.90625l-1.09375 0zm9.4765625 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 
-0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm7.0164948 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm7.4059753 0l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm0.7029877 -2.40625q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm10.537331 3.453125l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531403 
0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.967743 3.0625l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm11.834351 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 
0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852448 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 
-0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m273.6404 588.021l0 0c0 -6.2505493 36.66214 -11.317566 81.887146 -11.317566c45.225006 0 81.887146 5.0670166 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66214 11.317627 -81.887146 11.317627c-45.225006 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m437.4147 588.021l0 0c0 6.2505493 -36.66214 11.317566 -81.887146 11.317566c-45.225006 0 -81.887146 -5.0670166 -81.887146 -11.317566" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m273.6404 588.021l0 0c0 -6.2505493 36.66214 -11.317566 81.887146 -11.317566c45.225006 0 81.887146 5.0670166 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66214 11.317627 -81.887146 11.317627c-45.225006 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m437.4147 588.021l0 0c0 6.2505493 -36.66214 11.317566 -81.887146 11.317566c-45.225006 0 -81.887146 -5.0670166 -81.887146 -11.317566" 
fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m273.6404 588.021l0 0c0 -6.2505493 36.66214 -11.317566 81.887146 -11.317566c45.225006 0 81.887146 5.0670166 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66214 11.317627 -81.887146 11.317627c-45.225006 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path fill="#000000" d="m288.39917 613.1149l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218903 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.421753 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.9064026 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 
2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696045 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.91394 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.3122253 0l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 
-3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531403 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858368 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 
0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852295 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 
1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.4453125 2.0625l0 -1.328125l1.328125 0l0 1.328125q0 0.734375 -0.265625 1.1875q-0.25 0.453125 -0.8125 0.703125l-0.328125 -0.5q0.375 -0.171875 0.546875 -0.484375q0.171875 -0.3125 0.1875 -0.90625l-0.65625 0z" fill-rule="nonzero"/><path fill="#000000" d="m306.03482 629.1149l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.6180725 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 
0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.2093506 3.46875l2.53125 -3.59375l-2.34375 -3.3125l1.46875 0l1.0625 1.609375q0.296875 0.46875 0.484375 0.78125q0.28125 -0.4375 0.515625 -0.765625l1.171875 -1.625l1.40625 0l-2.390625 3.25l2.5625 3.65625l-1.4375 0l-1.421875 -2.140625l-0.375 -0.59375l-1.8125 2.734375l-1.421875 0zm7.4609375 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 
-0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.633667 0l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.8812256 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.08667 0l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm2.984253 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 
1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.46109 0l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm6.6468506 -4.734375l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.1937256 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 
0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m273.6378 484.68265l0 0c0 -6.2506714 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.0016785 0 5.8804016 1.1924133 8.002899 3.314911c2.1224976 2.1224976 3.314911 5.0012207 3.314911 8.002899l0 45.269867c0 6.2506714 -5.067169 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m273.6378 484.68265l0 0c0 -6.2506714 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.0016785 0 5.8804016 1.1924133 8.002899 3.314911c2.1224976 2.1224976 3.314911 5.0012207 3.314911 8.002899l0 45.269867c0 6.2506714 -5.067169 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m308.09225 504.11758l3.65625 -9.546875l1.359375 0l3.90625 9.546875l-1.4375 0l-1.109375 -2.890625l-3.984375 0l-1.046875 2.890625l-1.34375 0zm2.75 -3.921875l3.234375 0l-1.0 -2.640625q-0.453125 -1.203125 -0.671875 -1.96875q-0.1875 0.90625 -0.515625 1.8125l-1.046875 2.796875zm11.5147705 3.921875l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 
-2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm6.6312256 -4.734375l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm-1.484375 10.875l0.21875 -1.0q0.359375 0.09375 0.546875 0.09375q0.359375 0 0.53125 -0.25q0.1875 -0.234375 0.1875 -1.1875l0 -7.25l1.171875 0l0 7.28125q0 1.28125 -0.328125 1.78125q-0.4375 0.65625 -1.40625 0.65625q-0.484375 0 -0.921875 -0.125zm8.976715 -2.6875l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.4124756 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm9.6953125 1.015625l0.171875 1.03125q-0.5 0.109375 -0.890625 
0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.1247253 1.046875l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm2.9842224 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.46109 0l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 
2.0zm6.6468506 -4.734375l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.1937256 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875zm12.9123535 2.53125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm0.7029724 -2.40625q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 
1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375z" fill-rule="nonzero"/><path fill="#000000" d="m291.9674 511.93008l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875305l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm11.928101 -2.53125l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm2.1328125 2.53125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.188385 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 
1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.037476 3.265625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.5218506 1.40625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 
-0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm11.8671875 -0.15625l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm10.224823 6.78125l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 
0.703125q-0.578125 0.703125 -0.578125 2.03125zm11.084351 1.203125l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.73526 0l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.7031555 0.125 -1.0469055q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.8437805l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm2.9842224 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 
0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.45401 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.618042 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 
-0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9906006 3.46875l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm11.928101 -2.53125l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm6.8828125 0.3125l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m273.6404 381.34384l0 0c0 -6.2505493 36.66214 -11.317596 81.887146 -11.317596c45.225006 0 81.887146 5.067047 81.887146 11.317596l0 45.270325c0 6.2505493 -36.66214 11.317596 -81.887146 11.317596c-45.225006 0 -81.887146 -5.067047 
-81.887146 -11.317596z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m437.4147 381.34384l0 0c0 6.250519 -36.66214 11.317566 -81.887146 11.317566c-45.225006 0 -81.887146 -5.067047 -81.887146 -11.317566" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m273.6404 381.34384l0 0c0 -6.2505493 36.66214 -11.317596 81.887146 -11.317596c45.225006 0 81.887146 5.067047 81.887146 11.317596l0 45.270325c0 6.2505493 -36.66214 11.317596 -81.887146 11.317596c-45.225006 0 -81.887146 -5.067047 -81.887146 -11.317596z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m437.4147 381.34384l0 0c0 6.250519 -36.66214 11.317566 -81.887146 11.317566c-45.225006 0 -81.887146 -5.067047 -81.887146 -11.317566" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m273.6404 381.34384l0 0c0 -6.2505493 36.66214 -11.317596 81.887146 -11.317596c45.225006 0 81.887146 5.067047 81.887146 11.317596l0 45.270325c0 6.2505493 -36.66214 11.317596 -81.887146 11.317596c-45.225006 0 -81.887146 -5.067047 -81.887146 -11.317596z" fill-rule="evenodd"/><path fill="#000000" d="m288.39917 406.4378l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218903 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 
0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.421753 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.9064026 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696045 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.91394 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 
-1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.3122253 0l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531403 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 
-2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858368 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 
2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852295 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.4453125 2.0625l0 -1.328125l1.328125 0l0 1.328125q0 0.734375 -0.265625 1.1875q-0.25 0.453125 -0.8125 0.703125l-0.328125 -0.5q0.375 -0.171875 0.546875 -0.484375q0.171875 -0.3125 0.1875 -0.90625l-0.65625 0z" 
fill-rule="nonzero"/><path fill="#000000" d="m317.5412 422.4378l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm11.365601 1.234375l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.0531006 2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 
0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.92984 0l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.006226 4.125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 
-0.515625 2.0zm10.6154785 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm2.9842224 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.46109 0l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm6.6468506 -4.734375l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 
-4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.1937256 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m273.6378 278.00546l0 0c0 -6.250641 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.0016785 0 5.8804016 1.1924133 8.002899 3.314911c2.1224976 2.1224976 3.314911 5.001251 3.314911 8.002899l0 45.269897c0 6.2506714 -5.067169 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m273.6378 278.00546l0 0c0 -6.250641 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.0016785 0 5.8804016 1.1924133 8.002899 3.314911c2.1224976 2.1224976 3.314911 5.001251 3.314911 8.002899l0 45.269897c0 6.2506714 -5.067169 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m309.02795 294.09668l1.265625 0.3125q-0.390625 1.5625 -1.421875 2.375q-1.03125 
0.8125 -2.53125 0.8125q-1.53125 0 -2.5 -0.625q-0.96875 -0.625 -1.484375 -1.8125q-0.5 -1.1875 -0.5 -2.5625q0 -1.484375 0.5625 -2.59375q0.578125 -1.109375 1.625 -1.6875q1.0625 -0.578125 2.328125 -0.578125q1.421875 0 2.390625 0.734375q0.984375 0.71875 1.375 2.046875l-1.25 0.296875q-0.328125 -1.046875 -0.96875 -1.515625q-0.625 -0.484375 -1.578125 -0.484375q-1.09375 0 -1.84375 0.53125q-0.734375 0.53125 -1.03125 1.421875q-0.296875 0.875 -0.296875 1.828125q0 1.21875 0.34375 2.125q0.359375 0.90625 1.109375 1.359375q0.75 0.4375 1.625 0.4375q1.0625 0 1.796875 -0.609375q0.734375 -0.609375 0.984375 -1.8125zm2.6564026 3.34375l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.188385 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.037476 3.265625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 
-0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.8748474 -1.171875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm10.4123535 4.125l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531403 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 
-0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm13.248993 3.0625l0 -9.546875l1.25 0l0 9.546875l-1.25 0zm3.4842224 0l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm10.929993 7.65625l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 
-0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm11.084351 1.203125l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m341.9427 313.44043l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 
0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m273.6404 170.5958l0 0c0 
-6.250519 36.66214 -11.317581 81.887146 -11.317581c45.225006 0 81.887146 5.0670624 81.887146 11.317581l0 45.27034c0 6.250534 -36.66214 11.317596 -81.887146 11.317596c-45.225006 0 -81.887146 -5.0670624 -81.887146 -11.317596z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m437.4147 170.5958l0 0c0 6.250534 -36.66214 11.317596 -81.887146 11.317596c-45.225006 0 -81.887146 -5.0670624 -81.887146 -11.317596" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m273.6404 170.5958l0 0c0 -6.250519 36.66214 -11.317581 81.887146 -11.317581c45.225006 0 81.887146 5.0670624 81.887146 11.317581l0 45.27034c0 6.250534 -36.66214 11.317596 -81.887146 11.317596c-45.225006 0 -81.887146 -5.0670624 -81.887146 -11.317596z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m437.4147 170.5958l0 0c0 6.250534 -36.66214 11.317596 -81.887146 11.317596c-45.225006 0 -81.887146 -5.0670624 -81.887146 -11.317596" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m273.6404 170.5958l0 0c0 -6.250519 36.66214 -11.317581 81.887146 -11.317581c45.225006 0 81.887146 5.0670624 81.887146 11.317581l0 45.27034c0 6.250534 -36.66214 11.317596 -81.887146 11.317596c-45.225006 0 -81.887146 -5.0670624 -81.887146 -11.317596z" fill-rule="evenodd"/><path fill="#000000" d="m288.39917 195.68976l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218903 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 
0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.421753 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.9064026 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696045 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.91394 -3.453125q0 -1.921875 
1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.3122253 0l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531403 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 
0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858368 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 
-4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852295 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.4453125 2.0625l0 
-1.328125l1.328125 0l0 1.328125q0 0.734375 -0.265625 1.1875q-0.25 0.453125 -0.8125 0.703125l-0.328125 -0.5q0.375 -0.171875 0.546875 -0.484375q0.171875 -0.3125 0.1875 -0.90625l-0.65625 0z" fill-rule="nonzero"/><path fill="#000000" d="m309.04004 208.23663q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm10.5998535 4.125l0 -9.546875l1.25 0l0 9.546875l-1.25 0zm3.4842224 0l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 
0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm10.929993 7.65625l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm11.084351 1.203125l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.141357 0l0 
-9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 
0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.5937195 0 1.2030945 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.8593445 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m273.6378 58.252193l0 0c0 -6.2506523 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.0016785 0 5.8804016 1.1924095 8.002899 3.314911c2.1224976 2.1225014 3.314911 5.001232 3.314911 8.002899l0 45.269894c0 6.2506485 -5.067169 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671616 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m273.6378 58.252193l0 0c0 -6.2506523 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.0016785 0 5.8804016 1.1924095 8.002899 3.314911c2.1224976 2.1225014 3.314911 5.001232 3.314911 8.002899l0 45.269894c0 6.2506485 -5.067169 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671616 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m297.0469 82.34339l1.265625 0.3125q-0.390625 1.5625 -1.421875 2.375q-1.03125 0.8125 -2.53125 0.8125q-1.53125 0 -2.5 -0.625q-0.96875 -0.625 -1.484375 -1.8125q-0.5 -1.1875 -0.5 -2.5625q0 -1.484375 0.5625 -2.59375q0.578125 -1.109375 1.625 -1.6875q1.0625 -0.578125 2.328125 -0.578125q1.421875 0 2.390625 0.734375q0.984375 0.71875 1.375 2.046875l-1.25 0.296875q-0.328125 -1.046875 -0.96875 -1.515625q-0.625 -0.484375 -1.578125 -0.484375q-1.09375 0 -1.84375 0.53125q-0.734375 0.53125 -1.03125 1.421875q-0.296875 0.875 -0.296875 1.828125q0 
1.21875 0.34375 2.125q0.359375 0.90625 1.109375 1.359375q0.75 0.4375 1.625 0.4375q1.0625 0 1.796875 -0.609375q0.734375 -0.609375 0.984375 -1.8125zm2.6564026 3.34375l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.037476 3.265625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 
0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.874878 -1.171875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.756073 2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 
0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm9.6953125 1.015625l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1561279 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.507965 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.6560974 -1.484375l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm2.1484375 2.53125l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 
-0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.006226 4.125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm14.8186035 3.453125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 
0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm11.365601 1.234375l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.0531006 2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.726715 
0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875zm6.6468506 3.578125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m504.0 58.25197l0 0c0 -6.2505302 36.66211 -11.317585 81.887146 -11.317585c45.225037 0 81.887146 5.0670547 81.887146 11.317585l0 45.27034c0 6.250534 -36.66211 11.317589 -81.887146 11.317589c-45.225037 0 -81.887146 -5.0670547 -81.887146 -11.317589z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m667.7743 58.25197l0 0c0 6.2505264 -36.66211 11.317589 -81.887146 11.317589c-45.225037 0 -81.887146 -5.0670624 -81.887146 -11.317589" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m504.0 58.25197l0 0c0 -6.2505302 36.66211 -11.317585 81.887146 -11.317585c45.225037 0 81.887146 5.0670547 81.887146 11.317585l0 45.27034c0 6.250534 -36.66211 
11.317589 -81.887146 11.317589c-45.225037 0 -81.887146 -5.0670547 -81.887146 -11.317589z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m667.7743 58.25197l0 0c0 6.2505264 -36.66211 11.317589 -81.887146 11.317589c-45.225037 0 -81.887146 -5.0670624 -81.887146 -11.317589" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m504.0 58.25197l0 0c0 -6.2505302 36.66211 -11.317585 81.887146 -11.317585c45.225037 0 81.887146 5.0670547 81.887146 11.317585l0 45.27034c0 6.250534 -36.66211 11.317589 -81.887146 11.317589c-45.225037 0 -81.887146 -5.0670547 -81.887146 -11.317589z" fill-rule="evenodd"/><path fill="#000000" d="m514.86707 83.34593l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218872 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.421753 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 
-0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.906372 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696655 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.913879 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 
-0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.312256 0l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531372 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858398 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923096 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 
-0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 
-0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852905 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm13.304504 0.515625l0 -2.609375l-2.609375 0l0 -1.09375l2.609375 0l0 -2.59375l1.09375 0l0 2.59375l2.609375 0l0 1.09375l-2.609375 0l0 2.609375l-1.09375 0z" fill-rule="nonzero"/><path fill="#000000" d="m555.1792 97.28343l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 
1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm9.6953125 1.015625l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1561279 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.5079346 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.656128 -1.484375l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 
0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm2.1484375 2.53125l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.006226 4.125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm10.7092285 3.453125l0 -9.546875l1.25 0l0 9.546875l-1.25 0zm3.484253 0l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 
0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m503.99738 170.59602l0 0c0 -6.250656 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924133 8.002869 3.314911c2.1224976 2.1224976 3.3149414 5.001236 3.3149414 8.002899l0 45.269897c0 6.250656 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.067154 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m503.99738 170.59602l0 0c0 -6.250656 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924133 8.002869 3.314911c2.1224976 2.1224976 3.3149414 5.001236 3.3149414 8.002899l0 45.269897c0 6.250656 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.067154 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m537.4189 186.68723l1.265625 0.3125q-0.390625 1.5625 -1.421875 2.375q-1.03125 0.8125 -2.53125 0.8125q-1.53125 0 -2.5 -0.625q-0.96875 -0.625 -1.484375 -1.8125q-0.5 -1.1875 -0.5 -2.5625q0 -1.484375 0.5625 -2.59375q0.578125 -1.109375 1.625 -1.6875q1.0625 -0.578125 2.328125 -0.578125q1.421875 0 2.390625 0.734375q0.984375 0.71875 1.375 2.046875l-1.25 0.296875q-0.328125 -1.046875 -0.96875 -1.515625q-0.625 -0.484375 -1.578125 -0.484375q-1.09375 0 -1.84375 0.53125q-0.734375 0.53125 -1.03125 1.421875q-0.296875 0.875 -0.296875 1.828125q0 1.21875 0.34375 2.125q0.359375 0.90625 1.109375 1.359375q0.75 0.4375 1.625 0.4375q1.0625 0 1.796875 -0.609375q0.734375 -0.609375 0.984375 -1.8125zm2.656372 3.34375l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 
0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.037476 3.265625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 
-0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.874878 -1.171875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm10.3811035 4.125l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm10.3116455 5.0l0 -4.046875l-3.6875 -5.5l1.546875 0l1.875 2.875q0.515625 0.8125 0.96875 1.625q0.4375 -0.75 1.046875 -1.6875l1.84375 -2.8125l1.46875 0l-3.796875 5.5l0 4.046875l-1.265625 0zm6.1865845 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm16.875183 -1.015625q0.875 0.59375 1.609375 0.875l-0.359375 0.875q-1.03125 -0.359375 -2.0625 -1.15625q-1.0625 0.578125 -2.34375 0.578125q-1.296875 0 -2.34375 -0.625q-1.046875 -0.625 -1.625 -1.75q-0.5625 -1.125 -0.5625 
-2.546875q0 -1.421875 0.5625 -2.578125q0.578125 -1.15625 1.625 -1.75q1.0625 -0.609375 2.375 -0.609375q1.328125 0 2.375 0.625q1.0625 0.625 1.625 1.75q0.5625 1.125 0.5625 2.546875q0 1.1875 -0.359375 2.125q-0.359375 0.9375 -1.078125 1.640625zm-2.78125 -1.625q1.09375 0.3125 1.796875 0.921875q1.109375 -1.015625 1.109375 -3.0625q0 -1.15625 -0.390625 -2.015625q-0.390625 -0.875 -1.15625 -1.34375q-0.75 -0.484375 -1.703125 -0.484375q-1.40625 0 -2.34375 0.96875q-0.921875 0.96875 -0.921875 2.890625q0 1.859375 0.921875 2.859375q0.921875 0.984375 2.34375 0.984375q0.6875 0 1.28125 -0.25q-0.59375 -0.390625 -1.25 -0.546875l0.3125 -0.921875zm8.991882 0.578125l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1328125 2.0625l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 
0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.9611206 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m566.7602 208.68723l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm6.3343506 3.421875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.0164795 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 
1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 -4.734375l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm-1.484375 10.875l0.21875 -1.0q0.359375 0.09375 0.546875 0.09375q0.359375 0 0.53125 -0.25q0.1875 -0.234375 0.1875 -1.1875l0 -7.25l1.171875 0l0 7.28125q0 1.28125 -0.328125 1.78125q-0.4375 0.65625 -1.40625 0.65625q-0.484375 0 -0.921875 -0.125zm9.17981 -4.90625l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.037476 1.59375l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm4.7109375 1.484375l0.171875 1.03125q-0.5 0.109375 
-0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m504.0 278.00525l0 0c0 -6.250519 36.66211 -11.317596 81.887146 -11.317596c45.225037 0 81.887146 5.0670776 81.887146 11.317596l0 45.270355c0 6.250519 -36.66211 11.317566 -81.887146 11.317566c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317566z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m667.7743 278.00525l0 0c0 6.250519 -36.66211 11.317596 -81.887146 11.317596c-45.225037 0 -81.887146 -5.0670776 -81.887146 -11.317596" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m504.0 278.00525l0 0c0 -6.250519 36.66211 -11.317596 81.887146 -11.317596c45.225037 0 81.887146 5.0670776 81.887146 11.317596l0 45.270355c0 6.250519 -36.66211 11.317566 -81.887146 11.317566c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317566z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m667.7743 278.00525l0 0c0 6.250519 -36.66211 11.317596 -81.887146 11.317596c-45.225037 0 -81.887146 -5.0670776 -81.887146 -11.317596" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m504.0 278.00525l0 0c0 -6.250519 36.66211 -11.317596 81.887146 -11.317596c45.225037 0 81.887146 5.0670776 81.887146 11.317596l0 45.270355c0 6.250519 -36.66211 11.317566 -81.887146 11.317566c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317566z" fill-rule="evenodd"/><path fill="#000000" d="m514.86707 303.0992l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218872 -2.21875l1.203125 
0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.421753 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.906372 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 
-0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696655 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.913879 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.312256 0l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531372 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 
-0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858398 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923096 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 
0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852905 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 
-0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm13.304504 0.515625l0 -2.609375l-2.609375 0l0 -1.09375l2.609375 0l0 -2.59375l1.09375 0l0 2.59375l2.609375 0l0 1.09375l-2.609375 0l0 2.609375l-1.09375 0z" fill-rule="nonzero"/><path fill="#000000" d="m524.07184 319.0992l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm10.3116455 5.0l0 -4.046875l-3.6875 -5.5l1.546875 0l1.875 2.875q0.515625 0.8125 0.96875 1.625q0.4375 -0.75 1.046875 -1.6875l1.84375 -2.8125l1.46875 0l-3.796875 5.5l0 4.046875l-1.265625 0zm6.1866455 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm16.875122 -1.015625q0.875 0.59375 1.609375 0.875l-0.359375 0.875q-1.03125 -0.359375 -2.0625 -1.15625q-1.0625 0.578125 -2.34375 0.578125q-1.296875 0 -2.34375 -0.625q-1.046875 -0.625 -1.625 -1.75q-0.5625 -1.125 -0.5625 -2.546875q0 -1.421875 0.5625 -2.578125q0.578125 -1.15625 1.625 -1.75q1.0625 -0.609375 2.375 -0.609375q1.328125 0 2.375 0.625q1.0625 0.625 1.625 1.75q0.5625 1.125 0.5625 2.546875q0 1.1875 -0.359375 2.125q-0.359375 0.9375 -1.078125 1.640625zm-2.78125 -1.625q1.09375 0.3125 1.796875 0.921875q1.109375 -1.015625 1.109375 -3.0625q0 -1.15625 -0.390625 -2.015625q-0.390625 
-0.875 -1.15625 -1.34375q-0.75 -0.484375 -1.703125 -0.484375q-1.40625 0 -2.34375 0.96875q-0.921875 0.96875 -0.921875 2.890625q0 1.859375 0.921875 2.859375q0.921875 0.984375 2.34375 0.984375q0.6875 0 1.28125 -0.25q-0.59375 -0.390625 -1.25 -0.546875l0.3125 -0.921875zm12.335693 2.640625l-3.6875 -9.546875l1.359375 0l2.484375 6.9375q0.296875 0.828125 0.5 1.5625q0.21875 -0.78125 0.515625 -1.5625l2.578125 -6.9375l1.28125 0l-3.734375 9.546875l-1.296875 0zm5.7894897 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm4.8673096 0l-2.625 -6.90625l1.234375 0l1.484375 4.140625q0.234375 0.65625 0.4375 1.390625q0.15625 -0.546875 0.4375 -1.3125l1.53125 -4.21875l1.21875 0l-2.625 6.90625l-1.09375 0zm9.2578125 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm7.4749756 3.46875l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 
-0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm6.1937256 0q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm10.3498535 6.109375l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm6.3343506 3.421875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 
0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.0164795 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 -4.734375l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm-1.484375 10.875l0.21875 -1.0q0.359375 0.09375 0.546875 0.09375q0.359375 0 0.53125 -0.25q0.1875 -0.234375 0.1875 -1.1875l0 -7.25l1.171875 0l0 7.28125q0 1.28125 -0.328125 1.78125q-0.4375 0.65625 -1.40625 0.65625q-0.484375 0 -0.921875 -0.125zm9.179871 -4.90625l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.037476 1.59375l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 
-0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm4.7109375 1.484375l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m503.99738 381.34406l0 0c0 -6.250641 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924133 8.002869 3.314911c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.002899l0 45.269897c0 6.250641 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.067169 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m503.99738 381.34406l0 0c0 -6.250641 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924133 8.002869 3.314911c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.002899l0 45.269897c0 6.250641 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.067169 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m524.9847 397.7165l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 
-0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm9.1241455 5.71875l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm6.7109375 -2.65625l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm9.974976 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1405029 1.046875l0 -9.546875l1.171875 
0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.0531006 2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 
-0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.4766846 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.4453125 2.0625l0 -1.328125l1.328125 0l0 1.328125q0 0.734375 -0.265625 1.1875q-0.25 0.453125 -0.8125 0.703125l-0.328125 -0.5q0.375 -0.171875 0.546875 -0.484375q0.171875 -0.3125 0.1875 -0.90625l-0.65625 0zm7.093506 2.65625l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 
-0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm6.3343506 3.421875l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923096 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm7.5062256 0.9375l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm6.8828125 0.3125l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 
0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm14.7404785 3.265625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9906006 3.46875l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 
-0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm11.896851 0l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0z" fill-rule="nonzero"/><path fill="#000000" d="m571.5571 416.779l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.0164795 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm11.178101 3.453125l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 
0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm5.4437256 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.874878 -1.171875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m504.0 484.6824l0 0c0 -6.250519 36.66211 -11.317566 81.887146 -11.317566c45.225037 0 81.887146 5.067047 81.887146 11.317566l0 45.270355c0 6.2505493 -36.66211 11.317566 -81.887146 11.317566c-45.225037 0 -81.887146 -5.0670166 -81.887146 -11.317566z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m667.7743 484.6824l0 0c0 6.2505493 -36.66211 11.317596 -81.887146 11.317596c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317596" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m504.0 484.6824l0 0c0 -6.250519 36.66211 -11.317566 81.887146 -11.317566c45.225037 0 81.887146 5.067047 81.887146 11.317566l0 45.270355c0 6.2505493 -36.66211 11.317566 -81.887146 11.317566c-45.225037 0 -81.887146 -5.0670166 -81.887146 -11.317566z" 
fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m667.7743 484.6824l0 0c0 6.2505493 -36.66211 11.317596 -81.887146 11.317596c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317596" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m504.0 484.6824l0 0c0 -6.250519 36.66211 -11.317566 81.887146 -11.317566c45.225037 0 81.887146 5.067047 81.887146 11.317566l0 45.270355c0 6.2505493 -36.66211 11.317566 -81.887146 11.317566c-45.225037 0 -81.887146 -5.0670166 -81.887146 -11.317566z" fill-rule="evenodd"/><path fill="#000000" d="m570.5602 517.77637l0 -9.546875l3.59375 0q1.09375 0 1.75 0.296875q0.65625 0.28125 1.03125 0.890625q0.375 0.609375 0.375 1.265625q0 0.609375 -0.34375 1.15625q-0.328125 0.53125 -0.984375 0.859375q0.859375 0.25 1.328125 0.875q0.46875 0.609375 0.46875 1.4375q0 0.671875 -0.296875 1.25q-0.28125 0.578125 -0.703125 0.890625q-0.40625 0.3125 -1.03125 0.46875q-0.625 0.15625 -1.546875 0.15625l-3.640625 0zm1.265625 -5.53125l2.0625 0q0.84375 0 1.203125 -0.109375q0.484375 -0.140625 0.71875 -0.46875q0.25 -0.34375 0.25 -0.84375q0 -0.46875 -0.234375 -0.828125q-0.21875 -0.359375 -0.640625 -0.5q-0.421875 -0.140625 -1.453125 -0.140625l-1.90625 0l0 2.890625zm0 4.40625l2.375 0q0.609375 0 0.859375 -0.046875q0.4375 -0.078125 0.734375 -0.25q0.296875 -0.1875 0.484375 -0.53125q0.1875 -0.359375 0.1875 -0.8125q0 -0.53125 -0.28125 -0.921875q-0.265625 -0.40625 -0.75 -0.5625q-0.484375 -0.15625 -1.40625 -0.15625l-2.203125 0l0 3.28125zm7.5459595 -7.0625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.5079956 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 
0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.4217529 1.046875l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm3.437317 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9298706 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.7110596 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m132.81627 114.8399l0 44.44094" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m132.81627 114.8399l0 38.44094" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m131.16454 153.28084l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m132.81627 227.18373l0 44.440933" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m132.81627 227.18373l0 38.440933" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" 
stroke-width="1.0" stroke-linecap="butt" d="m131.16454 265.62466l1.6517334 4.5381165l1.6517334 -4.5381165z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m132.81627 339.52756l0 30.48819" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m132.81627 339.52756l0 24.48819" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m131.16454 364.01575l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m132.81627 437.93176l0 35.433075" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m132.81627 437.93176l0 29.433075" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m131.16454 467.36484l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m355.52756 576.7034l0 -35.433105" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m355.52756 576.7034l0 -29.433105" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m357.1793 547.2703l-1.6517334 -4.538086l-1.6517334 4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m355.52756 473.36484l0 -35.433075" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m355.52756 473.36484l0 -29.433075" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m357.1793 443.93176l-1.6517334 -4.5381165l-1.6517334 4.5381165z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m132.81627 541.2703l0 35.433105" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m132.81627 541.2703l0 29.433105" 
fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m131.16454 570.7034l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m355.52756 370.02625l0 -35.433075" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m355.52756 370.02625l0 -29.433075" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m357.1793 340.59317l-1.6517334 -4.538086l-1.6517334 4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m355.52756 266.68765l0 -39.496048" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m355.52756 266.68765l0 -33.496048" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m357.1793 233.1916l-1.6517334 -4.538101l-1.6517334 4.538101z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m355.52756 159.27821l0 -44.44094" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m355.52756 159.27821l0 -38.44094" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m357.1793 120.83727l-1.6517334 -4.538101l-1.6517334 4.538101z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m437.41733 80.88714l66.58267 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m437.41733 80.88714l60.582672 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m498.0 82.53887l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m585.88715 114.8399l0 44.44094" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" 
stroke-linecap="butt" d="m585.88715 114.8399l0 38.44094" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m584.2354 153.28084l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m585.88715 227.18373l0 39.496048" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m585.88715 227.18373l0 33.496048" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m584.2354 260.67978l1.6517334 4.5381165l1.6517334 -4.5381165z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m585.88715 334.59317l0 35.433075" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m585.88715 334.59317l0 29.433075" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m584.2354 364.02625l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m585.88715 437.93176l0 35.433075" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m585.88715 437.93176l0 29.433075" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m584.2354 467.36484l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m503.99738 676.02124l0 0c0 -6.2506714 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1923828 8.002869 3.3148804c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.00293l0 45.269897c0 6.2506104 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671997 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m503.99738 676.02124l0 0c0 -6.2506714 5.067169 -11.31781 11.31781 
-11.31781l141.14392 0c3.001648 0 5.880371 1.1923828 8.002869 3.3148804c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.00293l0 45.269897c0 6.2506104 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671997 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m545.44366 695.4562l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm7.4522705 5.0l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5218506 6.78125l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 
0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm10.865601 2.5625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9749756 3.46875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.188416 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 
-0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm10.2091675 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm7.0165405 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1248169 1.046875l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.5079956 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 
0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454346 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m544.2087 714.2687q-0.984375 -1.234375 -1.65625 -2.875q-0.65625 -1.640625 -0.65625 -3.390625q0 -1.546875 0.5 -2.96875q0.578125 -1.640625 1.8125 -3.28125l0.828125 0q-0.78125 1.359375 -1.03125 1.9375q-0.40625 0.890625 -0.625 1.875q-0.28125 1.21875 -0.28125 2.4375q0 3.140625 1.9375 6.265625l-0.828125 0zm2.1727295 -2.8125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923096 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 
-0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 
-0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.0009155 -2.859375l0 -1.1875l3.59375 0l0 1.1875l-3.59375 0zm5.9696045 2.859375l-1.078125 0l0 -9.546875l1.171875 0l0 3.40625q0.734375 -0.921875 1.890625 -0.921875q0.640625 0 1.203125 0.265625q0.578125 0.25 0.9375 0.71875q0.375 0.453125 0.578125 1.109375q0.203125 0.65625 0.203125 1.40625q0 1.78125 -0.875 2.75q-0.875 0.96875 -2.109375 0.96875q-1.21875 0 -1.921875 -1.015625l0 0.859375zm0 -3.5q0 1.234375 0.328125 1.78125q0.5625 0.90625 1.5 0.90625q0.765625 0 1.328125 -0.65625q0.5625 -0.671875 0.5625 -2.0q0 -1.34375 -0.546875 -1.984375q-0.53125 -0.65625 -1.296875 -0.65625q-0.765625 0 -1.328125 0.671875q-0.546875 0.671875 -0.546875 1.9375zm6.2874756 6.15625l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm6.2578125 -5.515625l0 -1.1875l3.59375 0l0 1.1875l-3.59375 0zm4.8758545 2.859375l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923706 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 
0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm5.2196045 2.8125l-0.828125 0q1.9375 -3.125 1.9375 -6.265625q0 
-1.21875 -0.28125 -2.421875q-0.21875 -0.984375 -0.609375 -1.875q-0.265625 -0.59375 -1.046875 -1.953125l0.828125 0q1.234375 1.640625 1.8125 3.28125q0.5 1.421875 0.5 2.96875q0 1.75 -0.671875 3.390625q-0.671875 1.640625 -1.640625 2.875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m503.99738 764.932l0 0c0 -6.2506714 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1923828 8.002869 3.3148804c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.00293l0 45.269836c0 6.2506714 -5.0671997 11.317871 -11.31781 11.317871l-141.14392 0c-6.250641 0 -11.31781 -5.0671997 -11.31781 -11.317871z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m503.99738 764.932l0 0c0 -6.2506714 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1923828 8.002869 3.3148804c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.00293l0 45.269836c0 6.2506714 -5.0671997 11.317871 -11.31781 11.317871l-141.14392 0c-6.250641 0 -11.31781 -5.0671997 -11.31781 -11.317871z" fill-rule="evenodd"/><path fill="#000000" d="m540.2504 792.36694l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm7.4522095 5.0l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.188416 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 
0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5218506 6.78125l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm10.865601 2.5625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 
-0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9749756 3.46875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm10.2248535 4.125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.4124756 2.65625l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 
0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm6.3031006 6.078125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm6.2421875 -4.71875l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 
6.90625l-1.171875 0zm2.9454346 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m437.4147 610.6562l17.585144 0l0 176.91339l48.997528 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="1.0,3.0" d="m437.4147 610.6562l17.585144 0l0 176.91339l42.997528 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m497.99738 789.2213l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m929.3989 676.02124l0 0c0 -6.2506714 5.0671997 -11.31781 11.31781 -11.31781l141.14398 0c3.001587 0 5.880371 1.1923828 8.002808 3.3148804c2.1225586 2.1224976 3.3149414 5.0012207 3.3149414 8.00293l0 45.269897c0 6.2506104 -5.0671387 11.31781 -11.317749 11.31781l-141.14398 0c-6.2506104 0 -11.31781 -5.0671997 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m929.3989 676.02124l0 0c0 -6.2506714 5.0671997 -11.31781 11.31781 -11.31781l141.14398 0c3.001587 0 5.880371 1.1923828 8.002808 3.3148804c2.1225586 2.1224976 3.3149414 5.0012207 3.3149414 8.00293l0 45.269897c0 6.2506104 -5.0671387 11.31781 -11.317749 11.31781l-141.14398 0c-6.2506104 0 -11.31781 -5.0671997 -11.31781 -11.31781z" 
fill-rule="evenodd"/><path fill="#000000" d="m960.4971 695.4562l0 -9.546875l6.90625 0l0 1.125l-5.640625 0l0 2.921875l5.28125 0l0 1.125l-5.28125 0l0 3.25l5.859375 0l0 1.125l-7.125 0zm8.7178955 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.633667 0l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.8656006 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923706 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 
0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1560669 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.5079956 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 
-0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm13.3654175 2.8125q-0.984375 -1.234375 -1.656189 -2.875q-0.65625 -1.640625 -0.65625 -3.390625q0 -1.546875 0.5 -2.96875q0.57806396 -1.640625 1.812439 -3.28125l0.828125 0q-0.78125 1.359375 -1.03125 1.9375q-0.40625 0.890625 -0.625 1.875q-0.28125 1.21875 -0.28125 2.4375q0 3.140625 1.9375 6.265625l-0.828125 0zm2.1727295 -2.8125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm7.0166016 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1247559 1.046875l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.5080566 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 
0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454346 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm11.868042 2.8125l-0.828125 0q1.9375 -3.125 1.9375 -6.265625q0 -1.21875 -0.28125 -2.421875q-0.21875 -0.984375 -0.609375 -1.875q-0.265625 -0.59375 -1.046875 -1.953125l0.828125 0q1.234375 1.640625 1.8125 3.28125q0.5 1.421875 0.5 2.96875q0 1.75 -0.671875 3.390625q-0.671875 1.640625 -1.640625 2.875z" fill-rule="nonzero"/><path fill="#000000" d="m966.68854 711.4562l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.4124756 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 
0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454956 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.1937256 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 
-1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875zm10.5061035 3.578125l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm7.4209595 7.65625l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm9.5859375 -2.65625l-3.6875 -9.546875l1.359375 0l2.484375 6.9375q0.296875 0.828125 0.5 1.5625q0.21875 -0.78125 0.515625 -1.5625l2.578125 -6.9375l1.28125 0l-3.734375 9.546875l-1.296875 0zm10.013489 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 
0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696045 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9298096 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4924316 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 
-0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm0.70288086 -2.40625q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m716.7008 676.021l0 0c0 -6.2505493 36.66211 -11.317566 81.887146 -11.317566c45.224976 0 81.887085 5.0670166 81.887085 11.317566l0 45.270325c0 6.2505493 -36.66211 11.317627 -81.887085 11.317627c-45.225037 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m880.47504 676.021l0 0c0 6.2505493 -36.66211 11.317566 -81.887085 11.317566c-45.225037 0 -81.887146 -5.0670166 -81.887146 -11.317566" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m716.7008 676.021l0 0c0 -6.2505493 36.66211 -11.317566 81.887146 -11.317566c45.224976 0 81.887085 5.0670166 81.887085 11.317566l0 45.270325c0 6.2505493 -36.66211 11.317627 -81.887085 11.317627c-45.225037 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path stroke="#000000" 
stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m880.47504 676.021l0 0c0 6.2505493 -36.66211 11.317566 -81.887085 11.317566c-45.225037 0 -81.887146 -5.0670166 -81.887146 -11.317566" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m716.7008 676.021l0 0c0 -6.2505493 36.66211 -11.317566 81.887146 -11.317566c45.224976 0 81.887085 5.0670166 81.887085 11.317566l0 45.270325c0 6.2505493 -36.66211 11.317627 -81.887085 11.317627c-45.225037 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path fill="#000000" d="m731.45953 701.1149l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218933 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.421692 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 
-3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.906433 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696045 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.91394 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 
1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.312195 0l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531433 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858337 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923706 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 
-0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 
1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852295 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.4453125 2.0625l0 -1.328125l1.328125 0l0 1.328125q0 0.734375 -0.265625 1.1875q-0.25 0.453125 -0.8125 0.703125l-0.328125 -0.5q0.375 -0.171875 0.546875 -0.484375q0.171875 -0.3125 0.1875 -0.90625l-0.65625 0z" fill-rule="nonzero"/><path fill="#000000" d="m739.2611 717.1149l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm8.359558 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.5079346 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 
-0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1405029 1.046875l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm13.9904785 0l-3.6875 -9.546875l1.359375 0l2.484375 6.9375q0.296875 0.828125 0.5 1.5625q0.21875 -0.78125 0.515625 -1.5625l2.578125 -6.9375l1.28125 0l-3.734375 9.546875l-1.296875 0zm10.013428 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696045 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9298706 0l0 -9.546875l1.171875 
0l0 9.546875l-1.171875 0zm7.4923096 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm0.7030029 -2.40625q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 
0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.156982 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm10.664917 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm11.131226 3.453125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 
-1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm11.365601 1.234375l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.5079956 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 
-0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m721.5801 764.93176l0 0c0 -6.2505493 36.66211 -11.317566 81.887085 -11.317566c45.225037 0 81.887146 5.0670166 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66211 11.317627 -81.887146 11.317627c-45.224976 0 -81.887085 -5.0670776 -81.887085 -11.317627z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m885.3543 764.93176l0 0c0 6.2505493 -36.66211 11.317566 -81.887146 11.317566c-45.224976 0 -81.887085 -5.0670166 -81.887085 -11.317566" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m721.5801 764.93176l0 0c0 -6.2505493 36.66211 -11.317566 81.887085 -11.317566c45.225037 0 81.887146 5.0670166 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66211 11.317627 -81.887146 11.317627c-45.224976 0 -81.887085 -5.0670776 -81.887085 -11.317627z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m885.3543 764.93176l0 0c0 6.2505493 -36.66211 11.317566 -81.887146 11.317566c-45.224976 0 -81.887085 -5.0670166 -81.887085 -11.317566" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m721.5801 764.93176l0 0c0 -6.2505493 36.66211 -11.317566 81.887085 -11.317566c45.225037 0 81.887146 5.0670166 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66211 11.317627 -81.887146 11.317627c-45.224976 0 -81.887085 -5.0670776 -81.887085 -11.317627z" fill-rule="evenodd"/><path fill="#000000" d="m736.3388 790.0257l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm14.218933 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 
0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.421692 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.906433 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 
3.625l-1.171875 0zm4.4696045 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm9.91394 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.312195 0l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531433 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 
1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858337 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923706 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 
1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852295 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 
0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.4453125 2.0625l0 -1.328125l1.328125 0l0 1.328125q0 0.734375 -0.265625 1.1875q-0.25 0.453125 -0.8125 0.703125l-0.328125 -0.5q0.375 -0.171875 0.546875 -0.484375q0.171875 -0.3125 0.1875 -0.90625l-0.65625 0z" fill-rule="nonzero"/><path fill="#000000" d="m750.0657 806.0257l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm8.359558 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.5079346 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1405029 1.046875l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm18.068604 -3.34375l1.265625 0.3125q-0.390625 1.5625 -1.421875 2.375q-1.03125 0.8125 -2.53125 0.8125q-1.53125 0 -2.5 -0.625q-0.96875 -0.625 -1.484375 -1.8125q-0.5 -1.1875 -0.5 -2.5625q0 -1.484375 0.5625 -2.59375q0.578125 -1.109375 1.625 -1.6875q1.0625 -0.578125 2.328125 
-0.578125q1.421875 0 2.390625 0.734375q0.984375 0.71875 1.375 2.046875l-1.25 0.296875q-0.328125 -1.046875 -0.96875 -1.515625q-0.625 -0.484375 -1.578125 -0.484375q-1.09375 0 -1.84375 0.53125q-0.734375 0.53125 -1.03125 1.421875q-0.296875 0.875 -0.296875 1.828125q0 1.21875 0.34375 2.125q0.359375 0.90625 1.109375 1.359375q0.75 0.4375 1.625 0.4375q1.0625 0 1.796875 -0.609375q0.734375 -0.609375 0.984375 -1.8125zm5.140747 1.796875l0 -2.609375l-2.609375 0l0 -1.09375l2.609375 0l0 -2.59375l1.09375 0l0 2.59375l2.609375 0l0 1.09375l-2.609375 0l0 2.609375l-1.09375 0zm7.7834473 0l0 -2.609375l-2.609375 0l0 -1.09375l2.609375 0l0 -2.59375l1.09375 0l0 2.59375l2.609375 0l0 1.09375l-2.609375 0l0 2.609375l-1.09375 0zm10.298889 1.546875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm8.328247 0l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.969666 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 
0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9906006 6.125l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm6.3499756 6.078125l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm11.084351 1.203125l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 
-2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852295 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m929.3989 764.932l0 0c0 -6.2506714 5.0671997 -11.31781 11.31781 -11.31781l141.14398 0c3.001587 0 5.880371 1.1923828 8.002808 3.3148804c2.1225586 2.1224976 
3.3149414 5.0012207 3.3149414 8.00293l0 45.269836c0 6.2506714 -5.0671387 11.317871 -11.317749 11.317871l-141.14398 0c-6.2506104 0 -11.31781 -5.0671997 -11.31781 -11.317871z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m929.3989 764.932l0 0c0 -6.2506714 5.0671997 -11.31781 11.31781 -11.31781l141.14398 0c3.001587 0 5.880371 1.1923828 8.002808 3.3148804c2.1225586 2.1224976 3.3149414 5.0012207 3.3149414 8.00293l0 45.269836c0 6.2506714 -5.0671387 11.317871 -11.317749 11.317871l-141.14398 0c-6.2506104 0 -11.31781 -5.0671997 -11.31781 -11.317871z" fill-rule="evenodd"/><path fill="#000000" d="m953.3701 781.30444l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm9.1866455 -5.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454346 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 
-0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.633728 0l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.8656006 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923096 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 
-1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1561279 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.5079346 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm13.3654785 2.8125q-0.984375 -1.234375 -1.65625 -2.875q-0.65625 -1.640625 -0.65625 -3.390625q0 -1.546875 0.5 -2.96875q0.578125 -1.640625 1.8125 -3.28125l0.828125 
0q-0.78125 1.359375 -1.03125 1.9375q-0.40625 0.890625 -0.625 1.875q-0.28125 1.21875 -0.28125 2.4375q0 3.140625 1.9375 6.265625l-0.828125 0zm2.1884155 -2.8125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.4124756 2.65625l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm6.3031006 6.078125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm6.2421875 -4.71875l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 
-0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454346 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm11.868042 2.8125l-0.828125 0q1.9375 -3.125 1.9375 -6.265625q0 -1.21875 -0.28125 -2.421875q-0.21875 -0.984375 -0.609375 -1.875q-0.265625 -0.59375 -1.046875 -1.953125l0.828125 0q1.234375 1.640625 1.8125 3.28125q0.5 1.421875 0.5 2.96875q0 1.75 -0.671875 3.390625q-0.671875 1.640625 -1.640625 2.875z" fill-rule="nonzero"/><path fill="#000000" d="m986.31616 800.36694l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 
-0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.4124756 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454956 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.1937256 0.578125l1.140625 
0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875zm17.302917 0.234375l1.265625 0.3125q-0.390625 1.5625 -1.421875 2.375q-1.031189 0.8125 -2.531189 0.8125q-1.53125 0 -2.5 -0.625q-0.96875 -0.625 -1.484375 -1.8125q-0.5 -1.1875 -0.5 -2.5625q0 -1.484375 0.5625 -2.59375q0.578125 -1.109375 1.625 -1.6875q1.0625 -0.578125 2.328125 -0.578125q1.421875 0 2.390564 0.734375q0.984375 0.71875 1.375 2.046875l-1.25 0.296875q-0.32806396 -1.046875 -0.96868896 -1.515625q-0.625 -0.484375 -1.578125 -0.484375q-1.09375 0 -1.84375 0.53125q-0.734375 0.53125 -1.03125 1.421875q-0.296875 0.875 -0.296875 1.828125q0 1.21875 0.34375 2.125q0.359375 0.90625 1.109375 1.359375q0.75 0.4375 1.625 0.4375q1.0625 0 1.796875 -0.609375q0.73431396 -0.609375 0.98431396 -1.8125zm5.140869 1.796875l0 -2.609375l-2.609375 0l0 -1.09375l2.609375 0l0 -2.59375l1.09375 0l0 2.59375l2.609375 0l0 1.09375l-2.609375 0l0 2.609375l-1.09375 0zm7.783325 0l0 -2.609375l-2.609375 0l0 -1.09375l2.609375 0l0 -2.59375l1.09375 0l0 2.59375l2.609375 0l0 1.09375l-2.609375 0l0 2.609375l-1.09375 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m667.7769 698.6562l48.91339 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" 
stroke-linejoin="round" stroke-linecap="butt" d="m667.7769 698.6562l42.91339 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m710.6903 700.3079l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m880.47504 698.6562l48.91339 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m880.47504 698.6562l42.91339 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m923.3884 700.3079l4.538147 -1.6517334l-4.538147 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m667.7769 787.56696l53.795288 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m667.7769 787.56696l47.795288 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m715.5722 789.2187l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m885.3543 787.56696l44.031494 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m885.3543 787.56696l38.031494 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m923.3858 789.2187l4.538147 -1.6517334l-4.538147 -1.6517334z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m503.99738 847.76135l0 0c0 -6.2506104 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924438 8.002869 3.3149414c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.002869l0 45.269897c0 6.2506714 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m503.99738 847.76135l0 0c0 -6.2506104 5.067169 
-11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924438 8.002869 3.3149414c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.002869l0 45.269897c0 6.2506714 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m555.3667 864.13385l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm9.1866455 -5.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454956 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 
0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.633667 0l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.8656006 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923096 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 
0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1561279 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.5079956 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m552.7635 883.19635l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 
6.90625l-1.046875 0zm2.4124756 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454956 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.1937256 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 
-0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875zm10.5060425 3.578125l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm7.4210205 7.65625l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm9.2734375 -3.703125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1405029 1.046875l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 
0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm6.9749756 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m50.929134 507.3176l-25.00045 0l0 363.08658l478.07132 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="1.0,3.0" d="m50.929134 507.3176l-25.000452 0l0 363.08658l472.07132 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m498.0 872.0559l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m898.21783 864.30444l202.96063 0l0 56.031494l-202.96063 0z" fill-rule="evenodd"/><path fill="#000000" d="m921.25586 883.0263l1.25 -0.125l-0.015625 0.328125q0 0.5625 
0.25 1.015625q0.265625 0.453125 0.84375 0.71875q0.59375 0.25 1.390625 0.25q1.15625 0 1.75 -0.5q0.609375 -0.515625 0.609375 -1.15625q0 -0.453125 -0.328125 -0.8125q-0.328125 -0.375 -1.78125 -1.0q-1.125 -0.484375 -1.53125 -0.75q-0.640625 -0.421875 -0.953125 -0.90625q-0.296875 -0.5 -0.296875 -1.140625q0 -0.734375 0.390625 -1.328125q0.40625 -0.59375 1.1875 -0.90625q0.78125 -0.3125 1.75 -0.3125q1.171875 0 1.96875 0.390625q0.796875 0.390625 1.15625 1.046875q0.375 0.640625 0.375 1.234375q0 0.0625 -0.015625 0.203125l-1.21875 0.09375q0 -0.40625 -0.078125 -0.625q-0.125 -0.40625 -0.40625 -0.671875q-0.265625 -0.28125 -0.75 -0.4375q-0.484375 -0.171875 -1.0625 -0.171875q-1.046875 0 -1.625 0.46875q-0.453125 0.359375 -0.453125 0.953125q0 0.34375 0.1875 0.625q0.1875 0.28125 0.65625 0.546875q0.34375 0.1875 1.609375 0.75q1.03125 0.453125 1.421875 0.703125q0.515625 0.34375 0.796875 0.84375q0.28125 0.484375 0.28125 1.109375q0 0.78125 -0.46875 1.4375q-0.46875 0.65625 -1.3125 1.015625q-0.828125 0.34375 -1.90625 0.34375q-1.609375 0 -2.640625 -0.703125q-1.03125 -0.703125 -1.03125 -2.53125zm10.0616455 -5.125l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203125l1.453125 -6.90625l1.171875 0l-1.4375 6.90625l-1.1875 0zm3.0079956 0l1.4375 -6.90625l1.1875 0l-0.25 1.140625q0.65625 -0.75 1.171875 -1.015625q0.53125 -0.28125 1.125 -0.28125q0.65625 0 1.078125 0.34375q0.4375 0.328125 0.578125 0.953125q0.515625 -0.65625 1.09375 -0.96875q0.59375 -0.328125 1.234375 -0.328125q0.875 0 1.296875 0.40625q0.4375 0.40625 0.4375 1.15625q0 0.3125 -0.15625 1.046875l-0.921875 4.453125l-1.171875 0l0.9375 -4.5625q0.125 -0.5625 0.125 -0.8125q0 -0.328125 -0.21875 -0.515625q-0.203125 -0.203125 -0.59375 -0.203125q-0.53125 0 -1.078125 0.328125q-0.546875 0.3125 -0.859375 0.828125q-0.296875 0.515625 -0.515625 1.59375l-0.703125 3.34375l-1.1875 0l0.984375 -4.671875q0.109375 -0.46875 0.109375 -0.671875q0 -0.34375 -0.21875 -0.546875q-0.21875 -0.203125 -0.546875 -0.203125q-0.5 0 -1.0625 0.328125q-0.546875 
0.3125 -0.890625 0.875q-0.34375 0.5625 -0.5625 1.625l-0.6875 3.265625l-1.171875 0zm15.821167 -1.25q-1.234375 1.40625 -2.546875 1.40625q-0.796875 0 -1.296875 -0.453125q-0.484375 -0.46875 -0.484375 -1.125q0 -0.4375 0.21875 -1.5l0.84375 -3.984375l1.171875 0l-0.921875 4.40625q-0.109375 0.5625 -0.109375 0.859375q0 0.390625 0.234375 0.609375q0.234375 0.21875 0.703125 0.21875q0.484375 0 0.953125 -0.234375q0.484375 -0.234375 0.8125 -0.640625q0.34375 -0.421875 0.5625 -0.984375q0.140625 -0.359375 0.328125 -1.25l0.625 -2.984375l1.1875 0l-1.453125 6.90625l-1.078125 0l0.25 -1.25zm2.5999756 1.25l2.0 -9.546875l1.171875 0l-1.984375 9.546875l-1.1875 0zm7.7266846 -0.859375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640625q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.578125zm5.0843506 2.546875l-0.203125 0.953125q-0.421875 0.109375 -0.8125 0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015625l-0.921875 0l0.1875 -0.90625l0.9375 
0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.8125q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125zm2.609253 -7.25l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203125l1.453125 -6.90625l1.171875 0l-1.4375 6.90625l-1.1875 0zm3.2267456 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm6.0218506 2.734375l1.453125 -6.90625l1.0625 0l-0.25 1.203125q0.6875 -0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.1875l-1.171875 0l0.921875 -4.375q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 -0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 2.21875l-0.671875 3.21875l-1.1875 0zm17.287292 -0.9375q-0.8125 0.671875 -1.390625 0.90625q-0.578125 0.234375 -1.28125 0.234375q-1.296875 0 -2.09375 -0.765625q-0.796875 -0.765625 -0.796875 -1.921875q0 -0.75 0.296875 -1.328125q0.296875 -0.578125 0.875 -1.046875q0.453125 -0.359375 1.328125 -0.75q-0.328125 -0.75 -0.40625 -1.0q-0.09375 -0.359375 -0.09375 -0.78125q0 -0.546875 0.296875 -1.109375q0.3125 -0.5625 0.875 -0.875q0.5625 
-0.328125 1.21875 -0.328125q0.921875 0 1.484375 0.53125q0.5625 0.53125 0.5625 1.296875q0 0.640625 -0.484375 1.28125q-0.484375 0.625 -1.890625 1.34375q0.859375 1.546875 1.640625 2.53125q0.46875 -0.578125 0.90625 -1.359375l1.03125 0.53125q-0.4375 0.890625 -1.203125 1.765625q0.421875 0.53125 1.109375 1.296875l-0.890625 0.71875q-0.65625 -0.578125 -1.09375 -1.171875zm-1.9375 -5.1875q0.96875 -0.5 1.375 -1.0q0.28125 -0.359375 0.28125 -0.71875q0 -0.390625 -0.265625 -0.65625q-0.25 -0.265625 -0.640625 -0.265625q-0.453125 0 -0.796875 0.375q-0.34375 0.359375 -0.34375 0.875q0 0.625 0.390625 1.390625zm1.25 4.296875q-1.203125 -1.59375 -1.890625 -2.859375q-0.875 0.40625 -1.265625 0.796875q-0.515625 0.546875 -0.515625 1.28125q0 0.734375 0.5 1.25q0.5 0.5 1.234375 0.5q0.984375 0 1.9375 -0.96875zm7.2646484 1.828125l2.0 -9.546875l6.890625 0l-0.234375 1.09375l-5.609375 0l-0.625 2.96875l5.46875 0l-0.21875 1.078125l-5.46875 0l-0.6875 3.328125l6.0 0l-0.234375 1.078125l-7.28125 0zm8.7335205 0l1.4375 -6.90625l1.1875 0l-0.25 1.140625q0.65625 -0.75 1.171875 -1.015625q0.53125 -0.28125 1.125 -0.28125q0.65625 0 1.078125 0.34375q0.4375 0.328125 0.578125 0.953125q0.515625 -0.65625 1.09375 -0.96875q0.59375 -0.328125 1.234375 -0.328125q0.875 0 1.296875 0.40625q0.4375 0.40625 0.4375 1.15625q0 0.3125 -0.15625 1.046875l-0.921875 4.453125l-1.171875 0l0.9375 -4.5625q0.125 -0.5625 0.125 -0.8125q0 -0.328125 -0.21875 -0.515625q-0.203125 -0.203125 -0.59375 -0.203125q-0.53125 0 -1.078125 0.328125q-0.546875 0.3125 -0.859375 0.828125q-0.296875 0.515625 -0.515625 1.59375l-0.703125 3.34375l-1.1875 0l0.984375 -4.671875q0.109375 -0.46875 0.109375 -0.671875q0 -0.34375 -0.21875 -0.546875q-0.21875 -0.203125 -0.546875 -0.203125q-0.5 0 -1.0625 0.328125q-0.546875 0.3125 -0.890625 0.875q-0.34375 0.5625 -0.5625 1.625l-0.6875 3.265625l-1.171875 0zm15.821167 -1.25q-1.234375 1.40625 -2.546875 1.40625q-0.796875 0 -1.296875 -0.453125q-0.484375 -0.46875 -0.484375 -1.125q0 -0.4375 0.21875 -1.5l0.84375 -3.984375l1.171875 
0l-0.921875 4.40625q-0.109375 0.5625 -0.109375 0.859375q0 0.390625 0.234375 0.609375q0.234375 0.21875 0.703125 0.21875q0.484375 0 0.953125 -0.234375q0.484375 -0.234375 0.8125 -0.640625q0.34375 -0.421875 0.56243896 -0.984375q0.140625 -0.359375 0.328125 -1.25l0.625 -2.984375l1.1875 0l-1.453125 6.90625l-1.078064 0l0.25 -1.25zm2.5999146 1.25l2.0 -9.546875l1.171875 0l-1.984375 9.546875l-1.1875 0zm7.7268066 -0.859375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640625q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.578125zm5.0843506 2.546875l-0.203125 0.953125q-0.421875 0.109375 -0.8125 0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015625l-0.921875 0l0.1875 -0.90625l0.9375 0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.8125q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125zm2.609253 
-7.25l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203125l1.453125 -6.90625l1.171875 0l-1.4375 6.90625l-1.1875 0zm3.2266846 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm6.0218506 2.734375l1.453125 -6.90625l1.0625 0l-0.25 1.203125q0.6875 -0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.1875l-1.171875 0l0.921875 -4.375q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 -0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 2.21875l-0.671875 3.21875l-1.1875 0zm11.2873535 0l1.25 -6.0l-1.046875 0l0.1875 -0.90625l1.046875 0l0.203125 -0.984375q0.15625 -0.734375 0.3125 -1.0625q0.171875 -0.328125 0.546875 -0.53125q0.375 -0.21875 1.015625 -0.21875q0.453125 0 1.3125 0.1875l-0.21875 1.03125q-0.59375 -0.15625 -1.0 -0.15625q-0.34375 0 -0.53125 0.171875q-0.171875 0.171875 -0.296875 0.8125l-0.15625 0.75l1.3125 0l-0.1875 0.90625l-1.3125 0l-1.265625 6.0l-1.171875 0zm3.437378 0l2.0 -9.546875l1.171875 0l-1.984375 9.546875l-1.1875 0zm3.2735596 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 
2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm7.3343506 2.734375l-0.71875 -6.90625l1.140625 0l0.3125 3.09375l0.125 1.75q0 0.1875 0.03125 0.78125q0.203125 -0.65625 0.328125 -1.015625q0.140625 -0.359375 0.4375 -1.015625l1.65625 -3.59375l1.28125 0l0.21875 3.453125q0.0625 0.78125 0.0625 2.09375q0.328125 -0.859375 1.0625 -2.421875l1.484375 -3.125l1.1875 0l-3.390625 6.90625l-1.21875 0l-0.25 -4.046875q-0.03125 -0.390625 -0.046875 -1.484375q-0.265625 0.703125 -0.625 1.515625l-1.828125 4.015625l-1.25 0zm8.421997 -2.359375l1.1875 -0.078125q0 0.515625 0.15625 0.875q0.15625 0.359375 0.578125 0.59375q0.421875 0.21875 0.96875 0.21875q0.78125 0 1.171875 -0.3125q0.390625 -0.3125 0.390625 -0.734375q0 -0.3125 -0.234375 -0.578125q-0.234375 -0.28125 -1.171875 -0.671875q-0.9375 -0.40625 -1.1875 -0.578125q-0.4375 -0.265625 -0.671875 -0.625q-0.21875 -0.359375 -0.21875 -0.828125q0 -0.8125 0.65625 -1.390625q0.65625 -0.59375 1.828125 -0.59375q1.296875 0 1.96875 0.609375q0.6875 0.59375 0.71875 1.578125l-1.15625 0.078125q-0.03125 -0.625 -0.453125 -0.984375q-0.40625 -0.375 -1.171875 -0.375q-0.609375 0 -0.953125 0.28125q-0.328125 0.28125 -0.328125 0.609375q0 0.3125 0.296875 0.5625q0.1875 0.171875 1.0 0.53125q1.359375 0.578125 1.703125 0.921875q0.5625 0.53125 0.5625 1.3125q0 0.515625 -0.3125 1.015625q-0.3125 0.484375 -0.96875 0.78125q-0.640625 0.296875 -1.515625 0.296875q-1.203125 0 -2.046875 -0.59375q-0.84375 -0.59375 -0.796875 -1.921875z" 
fill-rule="nonzero"/><path fill="#000000" d="m949.0678 902.10443l1.25 -6.0l-1.046875 0l0.1875 -0.90625l1.046875 0l0.203125 -0.984375q0.15625 -0.734375 0.3125 -1.0625q0.171875 -0.328125 0.546875 -0.53125q0.375 -0.21875 1.015625 -0.21875q0.453125 0 1.3125 0.1875l-0.21875 1.03125q-0.59375 -0.15625 -1.0 -0.15625q-0.34375 0 -0.53125 0.171875q-0.171875 0.171875 -0.296875 0.8125l-0.15625 0.75l1.3125 0l-0.1875 0.90625l-1.3125 0l-1.265625 6.0l-1.171875 0zm3.749878 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm6.0218506 2.734375l1.453125 -6.90625l1.03125 0l-0.28125 1.40625q0.53125 -0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765625l-1.125 0zm8.313232 0l1.25 -6.0l-1.046875 0l0.1875 -0.90625l1.046875 0l0.203125 -0.984375q0.15625 -0.734375 0.3125 -1.0625q0.171875 -0.328125 0.546875 -0.53125q0.375 -0.21875 1.015625 -0.21875q0.453125 0 1.3125 0.1875l-0.21875 1.03125q-0.59375 -0.15625 -1.0 -0.15625q-0.34375 0 -0.53125 0.171875q-0.171875 0.171875 -0.296875 0.8125l-0.15625 0.75l1.3125 0l-0.1875 0.90625l-1.3125 0l-1.265625 6.0l-1.171875 0zm8.249817 -1.25q-1.234375 1.40625 -2.546875 1.40625q-0.796875 0 -1.296875 
-0.453125q-0.484375 -0.46875 -0.484375 -1.125q0 -0.4375 0.21875 -1.5l0.84375 -3.984375l1.171875 0l-0.921875 4.40625q-0.109375 0.5625 -0.109375 0.859375q0 0.390625 0.234375 0.609375q0.234375 0.21875 0.703125 0.21875q0.484375 0 0.953125 -0.234375q0.484375 -0.234375 0.8125 -0.640625q0.34375 -0.421875 0.5625 -0.984375q0.140625 -0.359375 0.328125 -1.25l0.625 -2.984375l1.1875 0l-1.453125 6.90625l-1.078125 0l0.25 -1.25zm2.6937256 1.25l1.453125 -6.90625l1.0625 0l-0.25 1.203125q0.6875 -0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.1875l-1.171875 0l0.921875 -4.375q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 -0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 2.21875l-0.671875 3.21875l-1.1875 0zm12.193726 -2.515625l1.171875 0.125q-0.4375 1.296875 -1.265625 1.921875q-0.8125 0.625 -1.84375 0.625q-1.140625 0 -1.84375 -0.71875q-0.6875 -0.734375 -0.6875 -2.046875q0 -1.125 0.4375 -2.21875q0.453125 -1.09375 1.28125 -1.65625q0.84375 -0.578125 1.921875 -0.578125q1.109375 0 1.765625 0.625q0.65625 0.625 0.65625 1.65625l-1.15625 0.078125q-0.015625 -0.65625 -0.390625 -1.015625q-0.375 -0.375 -0.984375 -0.375q-0.703125 0 -1.234375 0.453125q-0.515625 0.4375 -0.8125 1.359375q-0.296875 0.90625 -0.296875 1.75q0 0.890625 0.390625 1.34375q0.390625 0.4375 0.96875 0.4375q0.5625 0 1.078125 -0.4375q0.53125 -0.4375 0.84375 -1.328125zm4.6484375 1.5625l-0.203125 0.953125q-0.421875 0.109375 -0.8125 0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015625l-0.921875 0l0.1875 -0.90625l0.9375 0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.8125q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125zm2.609253 -7.25l0.28125 -1.34375l1.171875 0l-0.28125 
1.34375l-1.171875 0zm-1.71875 8.203125l1.453125 -6.90625l1.171875 0l-1.4375 6.90625l-1.1875 0zm3.2267456 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm6.0218506 2.734375l1.453125 -6.90625l1.0625 0l-0.25 1.203125q0.6875 -0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.1875l-1.171875 0l0.921875 -4.375q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 -0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 2.21875l-0.671875 3.21875l-1.1875 0zm12.084351 -0.859375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 
0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640625q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.578125zm2.2249756 3.5l1.999939 -9.546875l1.171875 0l-1.984314 9.546875l-1.1875 0zm8.507751 0l-1.140625 -6.90625l1.15625 0l0.59375 3.796875q0.09375 0.625 0.234375 2.046875q0.34375 -0.734375 0.859375 -1.6875l2.3125 -4.15625l1.25 0l-3.953125 6.90625l-1.3125 0zm9.9921875 -2.34375l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.546875q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.96875 0.4375 1.484375q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.71875l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm5.7562256 4.0625l1.453125 -6.90625l1.03125 0l-0.28125 1.40625q0.53125 -0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765625l-1.125 0zm6.1103516 -8.203125l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203125l1.453125 -6.90625l1.171875 0l-1.4375 6.90625l-1.1875 0zm3.1798096 0l1.25 -6.0l-1.046875 0l0.1875 -0.90625l1.046875 0l0.203125 -0.984375q0.15625 -0.734375 
0.3125 -1.0625q0.171875 -0.328125 0.546875 -0.53125q0.375 -0.21875 1.015625 -0.21875q0.453125 0 1.3125 0.1875l-0.21875 1.03125q-0.59375 -0.15625 -1.0 -0.15625q-0.34375 0 -0.53125 0.171875q-0.171875 0.171875 -0.296875 0.8125l-0.15625 0.75l1.3125 0l-0.1875 0.90625l-1.3125 0l-1.265625 6.0l-1.171875 0zm5.203003 -8.203125l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203125l1.453125 -6.90625l1.171875 0l-1.4375 6.90625l-1.1875 0zm7.7891846 -2.515625l1.171875 0.125q-0.4375 1.296875 -1.265625 1.921875q-0.8125 0.625 -1.84375 0.625q-1.140625 0 -1.84375 -0.71875q-0.6875 -0.734375 -0.6875 -2.046875q0 -1.125 0.4375 -2.21875q0.453125 -1.09375 1.28125 -1.65625q0.84375 -0.578125 1.921875 -0.578125q1.109375 0 1.765625 0.625q0.65625 0.625 0.65625 1.65625l-1.15625 0.078125q-0.015625 -0.65625 -0.390625 -1.015625q-0.375 -0.375 -0.984375 -0.375q-0.703125 0 -1.234375 0.453125q-0.515625 0.4375 -0.8125 1.359375q-0.296875 0.90625 -0.296875 1.75q0 0.890625 0.390625 1.34375q0.390625 0.4375 0.96875 0.4375q0.5625 0 1.078125 -0.4375q0.53125 -0.4375 0.84375 -1.328125zm6.5546875 1.65625q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640625q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 
-0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.578125zm5.0843506 2.546875l-0.203125 0.953125q-0.421875 0.109375 -0.8125 0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015625l-0.921875 0l0.1875 -0.90625l0.9375 0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.8125q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125zm2.609253 -7.25l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203125l1.453125 -6.90625l1.171875 0l-1.4375 6.90625l-1.1875 0zm3.2266846 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm6.0218506 2.734375l1.453125 -6.90625l1.0625 0l-0.25 1.203125q0.6875 -0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.1875l-1.171875 0l0.921875 -4.375q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 -0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 
2.21875l-0.671875 3.21875l-1.1875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m368.08398 11.48294l252.62994 0l0 56.031494l-252.62994 0z" fill-rule="evenodd"/><path fill="#000000" d="m380.56836 33.28294l-1.828125 -9.546877l1.234375 0l1.09375 5.6875q0.3125 1.5625 0.40625 2.5q0.59375 -1.21875 0.984375 -1.9375l3.390625 -6.25l1.328125 0l-5.234375 9.546877l-1.375 0zm7.2738647 -8.203127l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203127l1.453125 -6.906252l1.171875 0l-1.4375 6.906252l-1.1875 0zm4.757965 0l-1.140625 -6.906252l1.15625 0l0.59375 3.796875q0.09375 0.625 0.234375 2.046877q0.34375 -0.7343769 0.859375 -1.6875019l2.3125 -4.15625l1.25 0l-3.953125 6.906252l-1.3125 0zm9.5859375 -0.859375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375019q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.3281269q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640627q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.8281269q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125019 0.859375 -0.8593769q0.3125 -0.5625 0.484375 -1.578125zm6.9906006 2.500002q-1.015625 1.15625 -2.109375 1.15625q-0.984375 0 -1.640625 -0.71875q-0.65625 -0.7343769 -0.65625 -2.109377q0 -1.265625 
0.515625 -2.3125q0.515625 -1.046875 1.296875 -1.5625q0.78125 -0.515625 1.5625 -0.515625q1.28125 0 1.9375 1.234375l0.78125 -3.71875l1.171875 0l-1.984375 9.546877l-1.09375 0l0.21875 -1.0zm-3.234375 -1.8906269q0 0.71875 0.140625 1.140625q0.140625 0.40625 0.484375 0.6875019q0.34375 0.28125 0.828125 0.28125q0.796875 0 1.453125 -0.8437519q0.875 -1.109375 0.875 -2.734375q0 -0.8125 -0.4375 -1.265625q-0.421875 -0.46875 -1.078125 -0.46875q-0.421875 0 -0.765625 0.1875q-0.34375 0.1875 -0.6875 0.640625q-0.34375 0.453125 -0.578125 1.15625q-0.234375 0.6875 -0.234375 1.21875zm6.1937256 0.265625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.0156269 -1.375 1.5625019q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.6406269 -0.34375 -1.4687519zm1.171875 -0.109375q0 0.96875 0.46875 1.4843769q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.4687519q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm9.8498535 2.734377l2.0 -9.546877l1.265625 0l-0.828125 3.96875l4.953125 0l0.8125 -3.96875l1.28125 0l-1.984375 9.546877l-1.28125 0l0.9375 -4.500002l-4.9375 0l-0.953125 4.500002l-1.265625 0zm9.593903 0l2.0 -9.546877l1.265625 0l-1.765625 8.468752l4.953125 0l-0.21875 1.078125l-6.234375 0zm7.8187256 -3.078127l1.25 -0.125l-0.015625 0.328125q0 0.5625 0.25 1.015625q0.265625 0.453125 0.84375 0.7187519q0.59375 0.25 1.390625 0.25q1.15625 0 1.75 -0.5000019q0.609375 -0.515625 0.609375 -1.15625q0 -0.453125 -0.328125 -0.8125q-0.328125 -0.375 -1.78125 -1.0q-1.125 -0.484375 -1.53125 -0.75q-0.640625 -0.421875 -0.953125 -0.90625q-0.296875 -0.5 -0.296875 -1.140625q0 
-0.734375 0.390625 -1.328125q0.40625 -0.59375 1.1875 -0.90625q0.78125 -0.3125 1.75 -0.3125q1.171875 0 1.96875 0.390625q0.796875 0.390625 1.15625 1.046875q0.375 0.640625 0.375 1.234375q0 0.0625 -0.015625 0.203125l-1.21875 0.09375q0 -0.40625 -0.078125 -0.625q-0.125 -0.40625 -0.40625 -0.671875q-0.265625 -0.28125 -0.75 -0.4375q-0.484375 -0.171875 -1.0625 -0.171875q-1.046875 0 -1.625 0.46875q-0.453125 0.359375 -0.453125 0.953125q0 0.34375 0.1875 0.625q0.1875 0.28125 0.65625 0.546875q0.34375 0.1875 1.609375 0.75q1.03125 0.453125 1.421875 0.703125q0.515625 0.34375 0.796875 0.84375q0.28125 0.484375 0.28125 1.109375q0 0.78125 -0.46875 1.4375019q-0.46875 0.65625 -1.3125 1.015625q-0.828125 0.34375 -1.90625 0.34375q-1.609375 0 -2.640625 -0.703125q-1.03125 -0.703125 -1.03125 -2.531252zm16.764618 2.218752q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375019q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.3281269q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640627q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.8281269q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125019 0.859375 -0.8593769q0.3125 -0.5625 0.484375 -1.578125zm2.3187256 3.500002l1.453125 -6.906252l1.0625 0l-0.25 1.203125q0.6875 
-0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.187502l-1.171875 0l0.921875 -4.375002q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 -0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 2.21875l-0.671875 3.218752l-1.1875 0zm12.084351 -1.0q-1.015625 1.15625 -2.109375 1.15625q-0.984375 0 -1.640625 -0.71875q-0.65625 -0.7343769 -0.65625 -2.109377q0 -1.265625 0.515625 -2.3125q0.515625 -1.046875 1.296875 -1.5625q0.78125 -0.515625 1.5625 -0.515625q1.28125 0 1.9375 1.234375l0.78125 -3.71875l1.171875 0l-1.984375 9.546877l-1.09375 0l0.21875 -1.0zm-3.234375 -1.8906269q0 0.71875 0.140625 1.140625q0.140625 0.40625 0.484375 0.6875019q0.34375 0.28125 0.828125 0.28125q0.796875 0 1.453125 -0.8437519q0.875 -1.109375 0.875 -2.734375q0 -0.8125 -0.4375 -1.265625q-0.421875 -0.46875 -1.078125 -0.46875q-0.421875 0 -0.765625 0.1875q-0.34375 0.1875 -0.6875 0.640625q-0.34375 0.453125 -0.578125 1.15625q-0.234375 0.6875 -0.234375 1.21875zm12.724823 2.890627l-1.828125 -9.546877l1.234375 0l1.09375 5.6875q0.3125 1.5625 0.40625 2.5q0.59375 -1.21875 0.984375 -1.9375l3.390625 -6.25l1.328125 0l-5.234375 9.546877l-1.375 0zm7.2738647 -8.203127l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203127l1.453125 -6.906252l1.171875 0l-1.4375 6.906252l-1.1875 0zm4.757965 0l-1.140625 -6.906252l1.15625 0l0.59375 3.796875q0.09375 0.625 0.234375 2.046877q0.34375 -0.7343769 0.859375 -1.6875019l2.3125 -4.15625l1.25 0l-3.953125 6.906252l-1.3125 0zm9.5859375 -0.859375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375019q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 
-0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.3281269q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640627q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.8281269q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125019 0.859375 -0.8593769q0.3125 -0.5625 0.484375 -1.578125zm6.9906006 2.500002q-1.015625 1.15625 -2.109375 1.15625q-0.984375 0 -1.640625 -0.71875q-0.65625 -0.7343769 -0.65625 -2.109377q0 -1.265625 0.515625 -2.3125q0.515625 -1.046875 1.296875 -1.5625q0.78125 -0.515625 1.5625 -0.515625q1.28125 0 1.9375 1.234375l0.78125 -3.71875l1.171875 0l-1.984375 9.546877l-1.09375 0l0.21875 -1.0zm-3.234375 -1.8906269q0 0.71875 0.140625 1.140625q0.140625 0.40625 0.484375 0.6875019q0.34375 0.28125 0.828125 0.28125q0.796875 0 1.453125 -0.8437519q0.875 -1.109375 0.875 -2.734375q0 -0.8125 -0.4375 -1.265625q-0.421875 -0.46875 -1.078125 -0.46875q-0.421875 0 -0.765625 0.1875q-0.34375 0.1875 -0.6875 0.640625q-0.34375 0.453125 -0.578125 1.15625q-0.234375 0.6875 -0.234375 1.21875zm6.1937256 0.265625q0 -2.015625 1.1875 -3.34375q0.9844055 -1.09375 2.5781555 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.0156269 -1.375 1.5625019q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.4687805 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.6406269 -0.34375 -1.4687519zm1.171875 -0.109375q0 0.96875 0.46878052 1.4843769q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.4687519q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 
0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.5312805 1.171875q-0.171875 0.671875 -0.171875 1.203125zm10.052948 2.734377l2.0 -9.546877l1.265625 0l-2.0 9.546877l-1.265625 0zm3.515503 0l2.0 -9.546877l3.96875 0q1.046875 0 1.5625 0.25q0.515625 0.234375 0.84375 0.8125q0.34375 0.578125 0.34375 1.28125q0 0.59375 -0.25 1.21875q-0.234375 0.609375 -0.609375 1.0q-0.359375 0.390625 -0.734375 0.59375q-0.375 0.203125 -0.8125 0.296875q-0.90625 0.21875 -1.84375 0.21875l-2.390625 0l-0.8125 3.875002l-1.265625 0zm2.3125 -4.953127l2.09375 0q1.21875 0 1.796875 -0.265625q0.578125 -0.265625 0.921875 -0.796875q0.34375 -0.546875 0.34375 -1.15625q0 -0.46875 -0.1875 -0.75q-0.1875 -0.296875 -0.515625 -0.4375q-0.328125 -0.140625 -1.265625 -0.140625l-2.453125 0l-0.734375 3.546875zm6.7647705 4.953127l2.0 -9.546877l1.265625 0l-2.0 9.546877l-1.265625 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m50.929134 305.5748l-25.00045 0l0 564.81885l478.07132 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="1.0,3.0" d="m50.929134 305.5748l-25.000452 0l0 564.81885l472.07132 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m498.0 872.0454l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m84.46982 12.845144l112.69292 0l0 56.031494l-112.69292 0z" fill-rule="evenodd"/><path fill="#000000" d="m94.04794 34.645145l2.0 -9.546877l2.953125 0q0.8125 0 1.203125 0.078125q0.625 0.109375 1.0625 0.390625q0.453125 0.265625 0.6875 0.75q0.25 0.46875 0.25 1.046875q0 0.796875 -0.4375 1.390625q-0.4375 0.59375 -1.328125 0.90625q0.78125 0.265625 1.15625 0.78125q0.390625 0.5 0.390625 1.1875q0 0.7812519 -0.453125 1.5156269q-0.453125 0.71875 -1.203125 
1.109375q-0.734375 0.390625 -1.65625 0.390625l-4.625 0zm2.421875 -5.468752l1.9375 0q1.375 0 1.984375 -0.4375q0.609375 -0.453125 0.609375 -1.296875q0 -0.390625 -0.1875 -0.6875q-0.1875 -0.296875 -0.5 -0.421875q-0.3125 -0.140625 -1.1875 -0.140625l-2.03125 0l-0.625 2.984375zm-0.921875 4.390627l2.171875 0q0.875 0 1.171875 -0.0625q0.609375 -0.109375 0.984375 -0.359375q0.375 -0.25 0.578125 -0.65625q0.21875 -0.40625 0.21875 -0.8593769q0 -0.671875 -0.421875 -1.015625q-0.40625 -0.34375 -1.59375 -0.34375l-2.421875 0l-0.6875 3.296877zm7.2491302 1.078125l1.453125 -6.906252l1.03125 0l-0.28125 1.40625q0.53125 -0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765627l-1.125 0zm9.516495 -2.34375l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.5468769q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.9687519 0.4375 1.4843769q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.7187519l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm7.5062256 4.062502l-1.140625 -6.906252l1.15625 0l0.59375 3.796875q0.09375 0.6250019 0.234375 2.046877q0.34375 -0.734375 0.859375 -1.6875019l2.3125 -4.15625l1.25 0l-3.953125 6.906252l-1.3125 0zm6.5859375 -8.203127l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203127l1.453125 -6.906252l1.171875 0l-1.4375 6.906252l-1.1875 0zm5.77359 -0.953125l-0.203125 0.953125q-0.421875 0.109375 -0.8125 
0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015627l-0.921875 0l0.1875 -0.90625l0.9375 0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.812502q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125zm5.6092377 0.09375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.0156269q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.7187519q-0.1875 0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640627q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.4531269q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.5781269zm2.4281006 1.1406269l1.1875 -0.078125q0 0.515625 0.15625 0.875q0.15625 0.359375 0.578125 0.59375q0.421875 0.21875 0.96875 0.21875q0.78125 0 1.171875 -0.3125q0.390625 -0.3125 0.390625 -0.734375q0 -0.3125 -0.234375 -0.578125q-0.234375 -0.2812519 -1.171875 -0.6718769q-0.9375 -0.40625 -1.1875 -0.578125q-0.4375 -0.265625 -0.671875 -0.625q-0.21875 -0.359375 -0.21875 -0.828125q0 -0.8125 0.65625 -1.390625q0.65625 -0.59375 1.828125 -0.59375q1.296875 0 1.96875 0.609375q0.6875 
0.59375 0.71875 1.578125l-1.15625 0.078125q-0.03125 -0.625 -0.453125 -0.984375q-0.40625 -0.375 -1.171875 -0.375q-0.609375 0 -0.953125 0.28125q-0.328125 0.28125 -0.328125 0.609375q0 0.3125 0.296875 0.5625q0.1875 0.171875 1.0 0.53125q1.359375 0.578125 1.703125 0.921875q0.5625 0.53125 0.5625 1.3125019q0 0.515625 -0.3125 1.015625q-0.3125 0.484375 -0.96875 0.78125q-0.640625 0.296875 -1.515625 0.296875q-1.203125 0 -2.046875 -0.59375q-0.84375 -0.59375 -0.796875 -1.921875zm15.3358 0.015625l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.5468769q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.9687519 0.4375 1.4843769q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.7187519l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm5.3031006 4.062502l3.0 -3.500002l-1.71875 -3.40625l1.296875 0l0.59375 1.21875q0.3125 0.703125 0.578125 1.328125l2.0 -2.546875l1.4375 0l-2.90625 3.484375l1.734375 3.421877l-1.296875 0l-0.6875 -1.40625q-0.21875 -0.453125 -0.5 -1.109375l-2.0625 2.515625l-1.46875 0zm6.5390625 2.65625l2.0 -9.562502l1.09375 0l-0.203125 0.953125q0.59375 -0.625 1.078125 -0.859375q0.484375 -0.25 1.015625 -0.25q0.984375 0 1.625 0.71875q0.65625 0.71875 0.65625 2.0625q0 1.078125 -0.359375 1.9687519q-0.34375 0.875 -0.875 1.421875q-0.515625 0.546875 -1.046875 0.796875q-0.53125 0.25 -1.09375 0.25q-1.25 0 -1.921875 -1.265625l-0.78125 3.765625l-1.1875 0zm2.328125 -5.484377q0 0.7812519 0.109375 1.0781269q0.171875 0.421875 0.53125 0.6875q0.375 0.25 0.875 0.25q1.015625 0 1.640625 -1.140625q0.625 
-1.1406269 0.625 -2.328127q0 -0.875 -0.421875 -1.359375q-0.421875 -0.484375 -1.046875 -0.484375q-0.453125 0 -0.84375 0.25q-0.375 0.234375 -0.703125 0.703125q-0.328125 0.46875 -0.546875 1.15625q-0.21875 0.6875 -0.21875 1.1875zm5.8812256 0.2031269q0 -2.015627 1.1875 -3.343752q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.218752q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.10937691q0 0.9687519 0.46875 1.4843769q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.8750019q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm6.0218506 2.734377l1.453125 -6.906252l1.03125 0l-0.28125 1.40625q0.53125 -0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765627l-1.125 0zm7.2039948 -0.953125l-0.203125 0.953125q-0.421875 0.109375 -0.8125 0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015627l-0.921875 0l0.1875 -0.90625l0.9375 0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.812502q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m165.72704 823.5512l151.9055 0l0 56.031494l-151.9055 0z" fill-rule="evenodd"/><path fill="#000000" d="m175.38329 845.3512l2.0 -9.546875l1.234375 0l1.609375 
3.578125q0.734375 1.640625 1.15625 2.765625q0.25 0.65625 0.578125 1.765625q0.171875 -1.21875 0.4375 -2.5l1.171875 -5.609375l1.25 0l-2.0 9.546875l-1.265625 0l-2.359375 -5.515625q-0.640625 -1.453125 -0.984375 -2.5625q-0.09375 0.921875 -0.375 2.28125l-1.21875 5.796875l-1.234375 0zm14.484528 -2.34375l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.546875q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.96875 0.4375 1.484375q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.71875l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm8.521851 3.109375l-0.203125 0.953125q-0.421875 0.109375 -0.8125 0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015625l-0.921875 0l0.1875 -0.90625l0.9375 0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.8125q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125zm2.2498627 0.953125l-0.71875 -6.90625l1.140625 0l0.3125 3.09375l0.125 1.75q0 0.1875 0.03125 0.78125q0.203125 -0.65625 0.328125 -1.015625q0.140625 -0.359375 0.4375 -1.015625l1.65625 -3.59375l1.28125 0l0.21875 3.453125q0.0625 0.78125 0.0625 2.09375q0.328125 -0.859375 1.0625 -2.421875l1.484375 -3.125l1.1875 0l-3.390625 6.90625l-1.21875 0l-0.25 -4.046875q-0.03125 -0.390625 -0.046875 -1.484375q-0.265625 0.703125 -0.625 1.515625l-1.828125 4.015625l-1.25 0zm8.531403 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 
2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm6.0218506 2.734375l1.453125 -6.90625l1.03125 0l-0.28125 1.40625q0.53125 -0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765625l-1.125 0zm4.4539948 0l2.0 -9.546875l1.171875 0l-1.234375 5.921875l3.4375 -3.28125l1.546875 0l-2.9375 2.578125l1.78125 4.328125l-1.296875 0l-1.375 -3.546875l-1.4375 1.234375l-0.46875 2.3125l-1.1875 0zm9.7733 2.65625l2.0 -9.5625l1.09375 0l-0.203125 0.953125q0.59375 -0.625 1.078125 -0.859375q0.484375 -0.25 1.015625 -0.25q0.984375 0 1.625 0.71875q0.65625 0.71875 0.65625 2.0625q0 1.078125 -0.359375 1.96875q-0.34375 0.875 -0.875 1.421875q-0.515625 0.546875 -1.046875 0.796875q-0.53125 0.25 -1.09375 0.25q-1.25 0 -1.921875 -1.265625l-0.78125 3.765625l-1.1875 0zm2.328125 -5.484375q0 0.78125 0.109375 1.078125q0.171875 0.421875 0.53125 0.6875q0.375 0.25 0.875 0.25q1.015625 0 1.640625 -1.140625q0.625 -1.140625 0.625 -2.328125q0 -0.875 -0.421875 -1.359375q-0.421875 -0.484375 -1.046875 -0.484375q-0.453125 0 -0.84375 0.25q-0.375 0.234375 -0.703125 0.703125q-0.328125 0.46875 -0.546875 1.15625q-0.21875 0.6875 -0.21875 1.1875zm5.6624756 2.828125l1.453125 -6.90625l1.03125 0l-0.28125 1.40625q0.53125 
-0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765625l-1.125 0zm9.516495 -2.34375l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.546875q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.96875 0.4375 1.484375q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.71875l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm5.1781006 6.71875l2.0 -9.5625l1.09375 0l-0.203125 0.953125q0.59375 -0.625 1.078125 -0.859375q0.484375 -0.25 1.015625 -0.25q0.984375 0 1.625 0.71875q0.65625 0.71875 0.65625 2.0625q0 1.078125 -0.359375 1.96875q-0.34375 0.875 -0.875 1.421875q-0.515625 0.546875 -1.046875 0.796875q-0.53125 0.25 -1.09375 0.25q-1.25 0 -1.921875 -1.265625l-0.78125 3.765625l-1.1875 0zm2.328125 -5.484375q0 0.78125 0.109375 1.078125q0.171875 0.421875 0.53125 0.6875q0.375 0.25 0.875 0.25q1.015625 0 1.640625 -1.140625q0.625 -1.140625 0.625 -2.328125q0 -0.875 -0.421875 -1.359375q-0.421875 -0.484375 -1.046875 -0.484375q-0.453125 0 -0.84375 0.25q-0.375 0.234375 -0.703125 0.703125q-0.328125 0.46875 -0.546875 1.15625q-0.21875 0.6875 -0.21875 1.1875zm10.334335 1.96875q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96873474 0 -1.5624847 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0624847 -0.390625q0.40625 -0.078125 1.515625 
-0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1874847 -0.109375q0.359375 -1.015625 1.1406097 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640625q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.64060974 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34373474 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.578125zm2.3187256 3.5l1.453125 -6.90625l1.03125 0l-0.28125 1.40625q0.53125 -0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765625l-1.125 0zm9.11026 -0.859375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640625q-0.234375 0.09375 -0.53125 
0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.578125zm5.0843506 2.546875l-0.203125 0.953125q-0.421875 0.109375 -0.8125 0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015625l-0.921875 0l0.1875 -0.90625l0.9375 0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.8125q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125zm2.6092224 -7.25l0.28125 -1.34375l1.171875 0l-0.28125 1.34375l-1.171875 0zm-1.71875 8.203125l1.453125 -6.90625l1.171875 0l-1.4375 6.90625l-1.1875 0zm3.226715 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 -1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm6.0218506 2.734375l1.453125 -6.90625l1.0625 0l-0.25 1.203125q0.6875 -0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.1875l-1.171875 0l0.921875 -4.375q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 
-0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 2.21875l-0.671875 3.21875l-1.1875 0z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m709.10236 278.00546l0 0c0 -6.250641 5.0671387 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924133 8.002869 3.314911c2.1224976 2.1224976 3.3149414 5.001251 3.3149414 8.002899l0 45.269897c0 6.2506714 -5.0671387 11.31781 -11.31781 11.31781l-141.14392 0c-6.2506714 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m709.10236 278.00546l0 0c0 -6.250641 5.0671387 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924133 8.002869 3.314911c2.1224976 2.1224976 3.3149414 5.001251 3.3149414 8.002899l0 45.269897c0 6.2506714 -5.0671387 11.31781 -11.31781 11.31781l-141.14392 0c-6.2506714 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m747.96936 293.70605l0 -1.125l4.03125 -0.015625l0 3.546875q-0.921875 0.75 -1.921875 1.125q-0.984375 0.359375 -2.03125 0.359375q-1.40625 0 -2.5625 -0.59375q-1.140625 -0.609375 -1.734375 -1.734375q-0.578125 -1.140625 -0.578125 -2.546875q0 -1.40625 0.578125 -2.609375q0.59375 -1.203125 1.6875 -1.78125q1.09375 -0.59375 2.515625 -0.59375q1.03125 0 1.859375 0.34375q0.84375 0.328125 1.3125 0.9375q0.484375 0.59375 0.734375 1.546875l-1.140625 0.3125q-0.21875 -0.71875 -0.53125 -1.140625q-0.3125 -0.421875 -0.90625 -0.671875q-0.59375 -0.25 -1.3125 -0.25q-0.875 0 -1.515625 0.265625q-0.625 0.265625 -1.015625 0.703125q-0.375 0.421875 -0.59375 0.9375q-0.359375 0.875 -0.359375 1.921875q0 1.265625 0.4375 2.125q0.4375 0.859375 1.265625 1.28125q0.84375 0.421875 1.796875 0.421875q0.8125 0 1.59375 -0.3125q0.78125 -0.328125 1.1875 -0.6875l0 -1.765625l-2.796875 0zm10.47644 1.515625l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 
-0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5218506 4.125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.9696045 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 
-0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.874878 -1.171875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 
-0.609375 1.4375zm10.3811035 4.125l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm10.3116455 5.0l0 -4.046875l-3.6875 -5.5l1.546875 0l1.875 2.875q0.515625 0.8125 0.96875 1.625q0.4375 -0.75 1.046875 -1.6875l1.84375 -2.8125l1.46875 0l-3.796875 5.5l0 4.046875l-1.265625 0zm6.1865845 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm16.875183 -1.015625q0.875 0.59375 1.609375 0.875l-0.359375 0.875q-1.03125 -0.359375 -2.0625 -1.15625q-1.0625 0.578125 -2.34375 0.578125q-1.296875 0 -2.34375 -0.625q-1.046875 -0.625 -1.625 -1.75q-0.5625 -1.125 -0.5625 -2.546875q0 -1.421875 0.5625 -2.578125q0.578125 -1.15625 1.625 -1.75q1.0625 -0.609375 2.375 -0.609375q1.328125 0 2.375 0.625q1.0625 0.625 1.625 1.75q0.5625 1.125 0.5625 2.546875q0 1.1875 -0.359375 2.125q-0.359375 0.9375 -1.078125 1.640625zm-2.78125 -1.625q1.09375 0.3125 1.796875 0.921875q1.109375 -1.015625 1.109375 -3.0625q0 -1.15625 -0.390625 -2.015625q-0.390625 -0.875 -1.15625 -1.34375q-0.75 -0.484375 -1.703125 -0.484375q-1.40625 0 -2.34375 0.96875q-0.921875 0.96875 -0.921875 2.890625q0 1.859375 0.921875 2.859375q0.921875 0.984375 2.34375 0.984375q0.6875 0 1.28125 -0.25q-0.59375 -0.390625 -1.25 -0.546875l0.3125 -0.921875z" fill-rule="nonzero"/><path fill="#000000" d="m753.3281 313.44043l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 
1.359375l0 3.625l-1.171875 0zm8.9852295 0l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.8812256 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm9.974976 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1561279 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454346 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 
-0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm15.836792 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm14.7404785 1.59375l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm1.7109375 -0.921875q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm11.131226 3.453125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 
-1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm11.365601 1.234375l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m667.7743 300.6404l41.322815 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m667.7743 300.6404l35.322815 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m703.0971 302.29214l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m709.105 383.32678l0 0c0 -6.2505493 36.66211 -11.317596 81.887146 -11.317596c45.225037 0 81.887146 5.067047 81.887146 11.317596l0 45.270325c0 6.2505493 -36.66211 11.317596 -81.887146 11.317596c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317596z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m872.8793 383.32678l0 0c0 6.250519 -36.66211 11.317566 
-81.887146 11.317566c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317566" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m709.105 383.32678l0 0c0 -6.2505493 36.66211 -11.317596 81.887146 -11.317596c45.225037 0 81.887146 5.067047 81.887146 11.317596l0 45.270325c0 6.2505493 -36.66211 11.317596 -81.887146 11.317596c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317596z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m872.8793 383.32678l0 0c0 6.250519 -36.66211 11.317566 -81.887146 11.317566c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317566" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m709.105 383.32678l0 0c0 -6.2505493 36.66211 -11.317596 81.887146 -11.317596c45.225037 0 81.887146 5.067047 81.887146 11.317596l0 45.270325c0 6.2505493 -36.66211 11.317596 -81.887146 11.317596c-45.225037 0 -81.887146 -5.067047 -81.887146 -11.317596z" fill-rule="evenodd"/><path fill="#000000" d="m734.2367 416.42075l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm10.311584 5.0l0 -4.046875l-3.6875 -5.5l1.546875 0l1.875 2.875q0.515625 0.8125 0.96875 1.625q0.4375 -0.75 1.046875 -1.6875l1.84375 -2.8125l1.46875 0l-3.796875 5.5l0 4.046875l-1.265625 0zm6.1866455 0l0 -9.546875l1.296875 0l5.015625 7.5l0 -7.5l1.203125 0l0 9.546875l-1.296875 0l-5.015625 -7.5l0 7.5l-1.203125 0zm16.875183 -1.015625q0.875 0.59375 1.609375 0.875l-0.359375 0.875q-1.03125 -0.359375 -2.0625 -1.15625q-1.0625 0.578125 -2.34375 0.578125q-1.296875 0 -2.34375 
-0.625q-1.046875 -0.625 -1.625 -1.75q-0.5625 -1.125 -0.5625 -2.546875q0 -1.421875 0.5625 -2.578125q0.578125 -1.15625 1.625 -1.75q1.0625 -0.609375 2.375 -0.609375q1.328125 0 2.375 0.625q1.0625 0.625 1.625 1.75q0.5625 1.125 0.5625 2.546875q0 1.1875 -0.359375 2.125q-0.359375 0.9375 -1.078125 1.640625zm-2.78125 -1.625q1.09375 0.3125 1.796875 0.921875q1.109375 -1.015625 1.109375 -3.0625q0 -1.15625 -0.390625 -2.015625q-0.390625 -0.875 -1.15625 -1.34375q-0.75 -0.484375 -1.703125 -0.484375q-1.40625 0 -2.34375 0.96875q-0.921875 0.96875 -0.921875 2.890625q0 1.859375 0.921875 2.859375q0.921875 0.984375 2.34375 0.984375q0.6875 0 1.28125 -0.25q-0.59375 -0.390625 -1.25 -0.546875l0.3125 -0.921875zm9.616882 2.640625l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm7.4210205 7.65625l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm9.2734375 -3.703125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 
0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1405029 1.046875l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm6.9749756 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm15.6310425 -2.53125l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 -1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 
0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm1.7109375 -0.921875q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm11.131226 3.453125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm11.365601 1.234375l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m790.9921 334.59317l0 
37.417328" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m790.9921 334.59317l0 31.417328" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m789.3404 366.0105l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m709.10236 484.68134l0 0c0 -6.2506714 5.0671387 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924133 8.002869 3.314911c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.002899l0 45.269897c0 6.2506104 -5.0671387 11.31781 -11.31781 11.31781l-141.14392 0c-6.2506714 0 -11.31781 -5.0671997 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m709.10236 484.68134l0 0c0 -6.2506714 5.0671387 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924133 8.002869 3.314911c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.002899l0 45.269897c0 6.2506104 -5.0671387 11.31781 -11.31781 11.31781l-141.14392 0c-6.2506714 0 -11.31781 -5.0671997 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m743.3261 512.1163l0 -8.421875l-3.140625 0l0 -1.125l7.5625 0l0 1.125l-3.15625 0l0 8.421875l-1.265625 0zm8.819763 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.0531006 2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 
0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm9.6953125 1.015625l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm4.406006 -2.40625q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 
-6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm11.1154175 0l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm11.928101 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9749756 3.46875l0 -6.90625l1.0625 0l0 
1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.938416 0l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm7.9124756 3.453125l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm12.859497 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 
0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9749756 3.46875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m790.9921 439.9147l0 33.448822" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m790.9921 439.9147l0 27.448822" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m789.3404 467.36353l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m667.7743 507.3176l41.322815 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m667.7743 507.3176l35.322815 0" fill-rule="evenodd"/><path fill="#000000" 
stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m703.0971 508.96933l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m686.67456 147.19948l194.17322 0l0 56.031494l-194.17322 0z" fill-rule="evenodd"/><path fill="#000000" d="m834.6542 168.99948l2.0 -9.546875l3.96875 0q1.046875 0 1.5625 0.25q0.515625 0.234375 0.84375 0.8125q0.34375 0.578125 0.34375 1.28125q0 0.59375 -0.25 1.21875q-0.234375 0.609375 -0.609375 1.0q-0.359375 0.390625 -0.734375 0.59375q-0.375 0.203125 -0.8125 0.296875q-0.90625 0.21875 -1.84375 0.21875l-2.390625 0l-0.8125 3.875l-1.265625 0zm2.3125 -4.953125l2.09375 0q1.21875 0 1.796875 -0.265625q0.578125 -0.265625 0.921875 -0.796875q0.34375 -0.546875 0.34375 -1.15625q0 -0.46875 -0.1875 -0.75q-0.1875 -0.296875 -0.515625 -0.4375q-0.328125 -0.140625 -1.265625 -0.140625l-2.453125 0l-0.734375 3.546875zm9.7178955 4.953125l0.765625 -3.6875l-2.921875 -5.859375l1.359375 0l1.40625 2.796875q0.46875 0.90625 0.890625 1.953125q0.484375 -0.890625 1.328125 -1.9375l2.234375 -2.8125l1.515625 0l-4.546875 5.953125l-0.75 3.59375l-1.28125 0zm5.8272705 0l2.0 -9.546875l1.234375 0l1.609375 3.578125q0.734375 1.640625 1.15625 2.765625q0.25 0.65625 0.578125 1.765625q0.171875 -1.21875 0.4375 -2.5l1.171875 -5.609375l1.25 0l-2.0 9.546875l-1.265625 0l-2.359375 -5.515625q-0.640625 -1.453125 -0.984375 -2.5625q-0.09375 0.921875 -0.375 2.28125l-1.21875 5.796875l-1.234375 0zm17.031372 -0.765625q0.46875 0.578125 1.296875 1.109375l-0.5625 0.75q-0.875 -0.5625 -1.5625 -1.375q-0.96875 0.453125 -1.84375 0.453125q-0.703125 0 -1.546875 -0.265625q-0.84375 -0.265625 -1.40625 -0.796875q-0.5625 -0.546875 -0.890625 -1.390625q-0.3125 -0.859375 -0.3125 -1.890625q0 -1.234375 0.46875 -2.375q0.40625 -0.9375 1.03125 -1.609375q0.640625 -0.6875 1.390625 -1.0625q1.0 -0.484375 2.15625 -0.484375q1.78125 0 2.90625 1.171875q1.125 1.15625 1.125 3.109375q0 1.4375 -0.609375 2.65625q-0.59375 1.203125 -1.640625 2.0zm-0.78125 -0.828125q0.78125 -0.609375 
1.265625 -1.671875q0.5 -1.078125 0.5 -2.234375q0 -1.4375 -0.796875 -2.28125q-0.796875 -0.859375 -1.953125 -0.859375q-0.953125 0 -1.859375 0.5625q-0.890625 0.5625 -1.421875 1.65625q-0.53125 1.078125 -0.53125 2.359375q0 1.609375 1.0 2.484375q0.78125 0.6875 1.671875 0.6875q0.71875 0 1.28125 -0.21875q-0.59375 -0.546875 -1.21875 -0.875l0.46875 -0.78125q0.421875 0.171875 0.75 0.421875q0.34375 0.25 0.84375 0.75z" fill-rule="nonzero"/><path fill="#000000" d="m699.43085 184.99948l2.0 -9.546875l1.265625 0l-0.828125 3.96875l4.953125 0l0.8125 -3.96875l1.28125 0l-1.984375 9.546875l-1.28125 0l0.9375 -4.5l-4.9375 0l-0.953125 4.5l-1.265625 0zm14.171997 -0.859375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 -0.859375zm0.421875 -2.640625q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.578125zm2.3187256 3.5l1.453125 -6.90625l1.03125 0l-0.28125 1.40625q0.53125 -0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 
0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765625l-1.125 0zm9.110291 -1.0q-1.015625 1.15625 -2.109375 1.15625q-0.984375 0 -1.640625 -0.71875q-0.65625 -0.734375 -0.65625 -2.109375q0 -1.265625 0.515625 -2.3125q0.515625 -1.046875 1.296875 -1.5625q0.78125 -0.515625 1.5625 -0.515625q1.28125 0 1.9375 1.234375l0.78125 -3.71875l1.171875 0l-1.984375 9.546875l-1.09375 0l0.21875 -1.0zm-3.234375 -1.890625q0 0.71875 0.140625 1.140625q0.140625 0.40625 0.484375 0.6875q0.34375 0.28125 0.828125 0.28125q0.796875 0 1.453125 -0.84375q0.875 -1.109375 0.875 -2.734375q0 -0.8125 -0.4375 -1.265625q-0.421875 -0.46875 -1.078125 -0.46875q-0.421875 0 -0.765625 0.1875q-0.34375 0.1875 -0.6875 0.640625q-0.34375 0.453125 -0.578125 1.15625q-0.234375 0.6875 -0.234375 1.21875zm7.2874756 2.890625l-0.71875 -6.90625l1.140625 0l0.3125 3.09375l0.125 1.75q0 0.1875 0.03125 0.78125q0.203125 -0.65625 0.328125 -1.015625q0.140625 -0.359375 0.4375 -1.015625l1.65625 -3.59375l1.28125 0l0.21875 3.453125q0.0625 0.78125 0.0625 2.09375q0.328125 -0.859375 1.0625 -2.421875l1.484375 -3.125l1.1875 0l-3.390625 6.90625l-1.21875 0l-0.25 -4.046875q-0.03125 -0.390625 -0.046875 -1.484375q-0.265625 0.703125 -0.625 1.515625l-1.828125 4.015625l-1.25 0zm12.984497 -0.859375q-0.625 0.53125 -1.1875 0.78125q-0.5625 0.234375 -1.203125 0.234375q-0.96875 0 -1.5625 -0.5625q-0.578125 -0.5625 -0.578125 -1.4375q0 -0.578125 0.265625 -1.015625q0.265625 -0.453125 0.703125 -0.71875q0.4375 -0.28125 1.0625 -0.390625q0.40625 -0.078125 1.515625 -0.125q1.109375 -0.046875 1.59375 -0.234375q0.125 -0.484375 0.125 -0.8125q0 -0.40625 -0.296875 -0.640625q-0.40625 -0.328125 -1.1875 -0.328125q-0.75 0 -1.21875 0.328125q-0.46875 0.328125 -0.6875 0.9375l-1.1875 -0.109375q0.359375 -1.015625 1.140625 -1.5625q0.796875 -0.546875 2.0 -0.546875q1.28125 0 2.03125 0.609375q0.578125 0.453125 0.578125 1.1875q0 0.546875 -0.15625 1.28125l-0.390625 1.71875q-0.1875 0.8125 -0.1875 1.328125q0 0.328125 0.15625 0.9375l-1.203125 0q-0.09375 -0.34375 -0.125 
-0.859375zm0.421875 -2.640625q-0.234375 0.09375 -0.53125 0.15625q-0.28125 0.046875 -0.9375 0.109375q-1.03125 0.078125 -1.453125 0.21875q-0.421875 0.140625 -0.640625 0.453125q-0.21875 0.296875 -0.21875 0.671875q0 0.5 0.34375 0.828125q0.34375 0.3125 0.984375 0.3125q0.578125 0 1.109375 -0.3125q0.546875 -0.3125 0.859375 -0.859375q0.3125 -0.5625 0.484375 -1.578125zm2.3187256 3.5l1.453125 -6.90625l1.03125 0l-0.28125 1.40625q0.53125 -0.796875 1.03125 -1.171875q0.515625 -0.390625 1.046875 -0.390625q0.359375 0 0.875 0.25l-0.484375 1.09375q-0.3125 -0.21875 -0.671875 -0.21875q-0.625 0 -1.28125 0.6875q-0.640625 0.6875 -1.015625 2.484375l-0.578125 2.765625l-1.125 0zm9.5164795 -2.34375l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.546875q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.96875 0.4375 1.484375q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.71875l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm14.4748535 0.296875l0.234375 -1.078125l4.171875 0l-0.8125 3.890625q-0.78125 0.484375 -1.796875 0.796875q-1.0 0.3125 -1.984375 0.3125q-2.078125 0 -3.109375 -1.25q-0.859375 -1.0625 -0.859375 -2.71875q0 -1.71875 0.75 -3.125q0.765625 -1.421875 1.921875 -2.09375q1.15625 -0.671875 2.5625 -0.671875q1.015625 0 1.8125 0.375q0.8125 0.359375 1.234375 0.984375q0.4375 0.609375 0.625 1.59375l-1.234375 0.140625q-0.203125 -0.984375 -0.84375 -1.5q-0.640625 -0.53125 -1.625 -0.53125q-1.046875 0 -1.953125 0.5625q-0.890625 0.5625 -1.421875 1.703125q-0.53125 1.125 -0.53125 2.578125q0 1.4375 0.703125 
2.171875q0.71875 0.734375 1.96875 0.734375q1.234375 0 2.671875 -0.828125l0.421875 -2.046875l-2.90625 0zm10.429565 1.421875l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.546875q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.96875 0.4375 1.484375q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.71875l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm5.7562256 4.0625l1.453125 -6.90625l1.0625 0l-0.25 1.203125q0.6875 -0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.1875l-1.171875 0l0.921875 -4.375q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 -0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 2.21875l-0.671875 3.21875l-1.1875 0zm14.4748535 -1.546875l0 -2.609375l-2.59375 0l0 -1.09375l2.59375 0l0 -2.59375l1.109375 0l0 2.59375l2.59375 0l0 1.09375l-2.59375 0l0 2.609375l-1.109375 0zm8.283264 1.546875l1.984375 -9.546875l2.875 0q1.046875 0 1.59375 0.15625q0.78125 0.203125 1.328125 0.71875q0.546875 0.515625 0.828125 1.296875q0.28125 0.765625 0.28125 1.734375q0 1.15625 -0.34375 2.109375q-0.34375 0.9375 -0.90625 1.65625q-0.5625 0.71875 -1.1875 1.125q-0.609375 0.40625 -1.453125 0.59375q-0.640625 0.15625 -1.5625 0.15625l-3.4375 0zm1.5 -1.078125l1.515625 0q1.015625 0 1.8125 -0.1875q0.5 -0.125 0.84375 -0.34375q0.46875 -0.296875 0.84375 -0.78125q0.484375 -0.640625 0.78125 -1.453125q0.296875 -0.8125 0.296875 -1.859375q0 
-1.15625 -0.40625 -1.78125q-0.390625 -0.625 -1.015625 -0.8125q-0.46875 -0.15625 -1.4375 -0.15625l-1.6875 0l-1.546875 7.375zm13.046997 -1.265625l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.546875q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.96875 0.4375 1.484375q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.71875l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm5.1781006 6.71875l2.0 -9.5625l1.09375 0l-0.203125 0.953125q0.59375 -0.625 1.078125 -0.859375q0.484375 -0.25 1.015625 -0.25q0.984375 0 1.625 0.71875q0.65625 0.71875 0.65625 2.0625q0 1.078125 -0.359375 1.96875q-0.34375 0.875 -0.875 1.421875q-0.515625 0.546875 -1.046875 0.796875q-0.53125 0.25 -1.09375 0.25q-1.25 0 -1.921875 -1.265625l-0.78125 3.765625l-1.1875 0zm2.328125 -5.484375q0 0.78125 0.109375 1.078125q0.171875 0.421875 0.53125 0.6875q0.375 0.25 0.875 0.25q1.015625 0 1.640625 -1.140625q0.625 -1.140625 0.625 -2.328125q0 -0.875 -0.421875 -1.359375q-0.421875 -0.484375 -1.046875 -0.484375q-0.453125 0 -0.84375 0.25q-0.375 0.234375 -0.703125 0.703125q-0.328125 0.46875 -0.546875 1.15625q-0.21875 0.6875 -0.21875 1.1875zm5.5687256 2.828125l2.0 -9.546875l1.171875 0l-1.984375 9.546875l-1.1875 0zm3.2736206 -2.625q0 -2.015625 1.1875 -3.34375q0.984375 -1.09375 2.578125 -1.09375q1.25 0 2.015625 0.78125q0.765625 0.78125 0.765625 2.109375q0 1.1875 -0.484375 2.21875q-0.484375 1.015625 -1.375 1.5625q-0.890625 0.546875 -1.875 0.546875q-0.796875 0 -1.46875 -0.34375q-0.65625 -0.34375 -1.0 -0.96875q-0.34375 -0.640625 -0.34375 
-1.46875zm1.171875 -0.109375q0 0.96875 0.46875 1.484375q0.46875 0.5 1.1875 0.5q0.375 0 0.75 -0.15625q0.375 -0.15625 0.6875 -0.46875q0.328125 -0.3125 0.546875 -0.703125q0.21875 -0.40625 0.359375 -0.875q0.203125 -0.640625 0.203125 -1.234375q0 -0.9375 -0.46875 -1.453125q-0.46875 -0.515625 -1.1875 -0.515625q-0.5625 0 -1.015625 0.265625q-0.453125 0.265625 -0.828125 0.78125q-0.359375 0.5 -0.53125 1.171875q-0.171875 0.671875 -0.171875 1.203125zm5.5843506 5.40625l0.078125 -1.109375q0.359375 0.109375 0.703125 0.109375q0.359375 0 0.578125 -0.171875q0.28125 -0.21875 0.625 -0.796875l0.375 -0.6875l-1.15625 -6.921875l1.171875 0l0.515625 3.484375q0.15625 1.046875 0.265625 2.078125l3.09375 -5.5625l1.234375 0l-4.40625 7.828125q-0.640625 1.140625 -1.140625 1.515625q-0.5 0.375 -1.140625 0.375q-0.40625 0 -0.796875 -0.140625zm7.1015625 -2.671875l1.4375 -6.90625l1.1875 0l-0.25 1.140625q0.65625 -0.75 1.171875 -1.015625q0.53125 -0.28125 1.125 -0.28125q0.65625 0 1.078125 0.34375q0.4375 0.328125 0.578125 0.953125q0.515625 -0.65625 1.09375 -0.96875q0.59375 -0.328125 1.234375 -0.328125q0.875 0 1.296875 0.40625q0.4375 0.40625 0.4375 1.15625q0 0.3125 -0.15625 1.046875l-0.921875 4.453125l-1.171875 0l0.9375 -4.5625q0.125 -0.5625 0.125 -0.8125q0 -0.328125 -0.21875 -0.515625q-0.203125 -0.203125 -0.59375 -0.203125q-0.53125 0 -1.078125 0.328125q-0.546875 0.3125 -0.859375 0.828125q-0.296875 0.515625 -0.515625 1.59375l-0.703125 3.34375l-1.1875 0l0.984375 -4.671875q0.109375 -0.46875 0.109375 -0.671875q0 -0.34375 -0.21875 -0.546875q-0.21875 -0.203125 -0.546875 -0.203125q-0.5 0 -1.0625 0.328125q-0.546875 0.3125 -0.890625 0.875q-0.34375 0.5625 -0.5625 1.625l-0.6875 3.265625l-1.171875 0zm16.180542 -2.34375l1.15625 0.109375q-0.25 0.859375 -1.140625 1.625q-0.890625 0.765625 -2.125 0.765625q-0.765625 0 -1.40625 -0.34375q-0.640625 -0.359375 -0.984375 -1.03125q-0.328125 -0.6875 -0.328125 -1.546875q0 -1.140625 0.515625 -2.203125q0.53125 -1.0625 1.359375 -1.578125q0.84375 -0.515625 1.8125 -0.515625q1.234375 0 
1.96875 0.765625q0.75 0.765625 0.75 2.09375q0 0.5 -0.09375 1.046875l-5.09375 0q-0.03125 0.1875 -0.03125 0.359375q0 0.96875 0.4375 1.484375q0.453125 0.5 1.109375 0.5q0.59375 0 1.171875 -0.390625q0.59375 -0.390625 0.921875 -1.140625zm-3.421875 -1.71875l3.875 0q0.015625 -0.1875 0.015625 -0.265625q0 -0.875 -0.453125 -1.34375q-0.4375 -0.484375 -1.125 -0.484375q-0.765625 0 -1.390625 0.53125q-0.609375 0.515625 -0.921875 1.5625zm5.7562256 4.0625l1.453125 -6.90625l1.0625 0l-0.25 1.203125q0.6875 -0.71875 1.296875 -1.03125q0.609375 -0.328125 1.234375 -0.328125q0.84375 0 1.3125 0.453125q0.484375 0.453125 0.484375 1.21875q0 0.375 -0.171875 1.203125l-0.875 4.1875l-1.171875 0l0.921875 -4.375q0.125 -0.640625 0.125 -0.953125q0 -0.34375 -0.234375 -0.546875q-0.234375 -0.21875 -0.6875 -0.21875q-0.90625 0 -1.609375 0.65625q-0.703125 0.640625 -1.03125 2.21875l-0.671875 3.21875l-1.1875 0zm10.178101 -0.953125l-0.203125 0.953125q-0.421875 0.109375 -0.8125 0.109375q-0.703125 0 -1.125 -0.34375q-0.3125 -0.25 -0.3125 -0.703125q0 -0.234375 0.171875 -1.046875l0.828125 -4.015625l-0.921875 0l0.1875 -0.90625l0.9375 0l0.34375 -1.703125l1.359375 -0.8125l-0.53125 2.515625l1.15625 0l-0.1875 0.90625l-1.15625 0l-0.796875 3.8125q-0.15625 0.734375 -0.15625 0.875q0 0.21875 0.109375 0.328125q0.125 0.109375 0.40625 0.109375q0.390625 0 0.703125 -0.078125z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m437.4147 193.23097l23.585571 0l0 505.41736l42.9971 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="1.0,3.0" d="m437.4147 193.23097l23.585571 0l0 505.41736l36.9971 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m497.99738 700.30005l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m508.87665 581.6065l0 0c0 -6.2506104 5.0671387 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924438 8.002869 
3.3149414c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.002869l0 45.269897c0 6.2506714 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.2506714 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m508.87665 581.6065l0 0c0 -6.2506104 5.0671387 -11.31781 11.31781 -11.31781l141.14392 0c3.001648 0 5.880371 1.1924438 8.002869 3.3149414c2.1224976 2.1224976 3.3149414 5.0012207 3.3149414 8.002869l0 45.269897c0 6.2506714 -5.0671997 11.31781 -11.31781 11.31781l-141.14392 0c-6.2506714 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m550.32294 601.04144l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm7.4522705 5.0l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 
-1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5218506 6.78125l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm10.865601 2.5625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9749756 3.46875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 
-0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm10.2092285 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm7.0164795 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1248779 1.046875l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.5079956 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 
-0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.1484375 -6.125l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9454346 0l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 -0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m558.33887 619.85394q-0.984375 -1.234375 -1.65625 -2.875q-0.65625 -1.640625 -0.65625 -3.390625q0 -1.546875 0.5 -2.96875q0.578125 -1.640625 1.8125 -3.28125l0.828125 0q-0.78125 1.359375 -1.03125 1.9375q-0.40625 0.890625 -0.625 1.875q-0.28125 1.21875 -0.28125 2.4375q0 3.140625 1.9375 6.265625l-0.828125 0zm1.7196045 -4.875l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 
-0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm9.6953125 1.015625l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1561279 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.5079346 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.656128 -1.484375l1.15625 0.15625q-0.1875 1.1875 -0.96875 1.859375q-0.78125 0.671875 
-1.921875 0.671875q-1.40625 0 -2.28125 -0.921875q-0.859375 -0.9375 -0.859375 -2.65625q0 -1.125 0.375 -1.96875q0.375 -0.84375 1.125 -1.25q0.765625 -0.421875 1.65625 -0.421875q1.125 0 1.84375 0.578125q0.71875 0.5625 0.921875 1.609375l-1.140625 0.171875q-0.171875 -0.703125 -0.59375 -1.046875q-0.40625 -0.359375 -0.984375 -0.359375q-0.890625 0 -1.453125 0.640625q-0.546875 0.640625 -0.546875 2.0q0 1.40625 0.53125 2.03125q0.546875 0.625 1.40625 0.625q0.6875 0 1.140625 -0.421875q0.46875 -0.421875 0.59375 -1.296875zm2.1484375 2.53125l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm11.006226 4.125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 
0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm10.7092285 3.453125l0 -9.546875l1.25 0l0 9.546875l-1.25 0zm3.484253 0l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm8.233459 7.8125l-0.828125 0q1.9375 -3.125 1.9375 -6.265625q0 -1.21875 -0.28125 -2.421875q-0.21875 -0.984375 -0.609375 -1.875q-0.265625 -0.59375 -1.046875 -1.953125l0.828125 0q1.234375 1.640625 1.8125 3.28125q0.5 1.421875 0.5 2.96875q0 1.75 -0.671875 3.390625q-0.671875 1.640625 -1.640625 2.875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m721.5801 581.6063l0 0c0 -6.2505493 36.66211 -11.317627 81.887085 -11.317627c45.225037 0 81.887146 5.0670776 81.887146 11.317627l0 45.270325c0 6.2505493 -36.66211 11.317566 -81.887146 11.317566c-45.224976 0 -81.887085 -5.0670166 -81.887085 -11.317566z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m885.3543 581.6063l0 0c0 6.2504883 -36.66211 11.317566 -81.887146 11.317566c-45.224976 0 -81.887085 -5.0670776 -81.887085 -11.317566" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m721.5801 581.6063l0 0c0 -6.2505493 36.66211 -11.317627 81.887085 -11.317627c45.225037 0 81.887146 5.0670776 81.887146 11.317627l0 45.270325c0 6.2505493 -36.66211 11.317566 -81.887146 11.317566c-45.224976 0 -81.887085 -5.0670166 -81.887085 -11.317566z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m885.3543 581.6063l0 0c0 6.2504883 -36.66211 11.317566 -81.887146 11.317566c-45.224976 0 -81.887085 
-5.0670776 -81.887085 -11.317566" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m721.5801 581.6063l0 0c0 -6.2505493 36.66211 -11.317627 81.887085 -11.317627c45.225037 0 81.887146 5.0670776 81.887146 11.317627l0 45.270325c0 6.2505493 -36.66211 11.317566 -81.887146 11.317566c-45.224976 0 -81.887085 -5.0670166 -81.887085 -11.317566z" fill-rule="evenodd"/><path fill="#000000" d="m741.97156 606.70026l0 -9.546875l6.4375 0l0 1.125l-5.171875 0l0 2.96875l4.46875 0l0 1.125l-4.46875 0l0 4.328125l-1.265625 0zm12.453857 0l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.8656006 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.9610596 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.5236206 -2.859375l0 -1.1875l3.59375 0l0 1.1875l-3.59375 0zm4.8914795 2.859375l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm12.146851 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 
1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.084351 3.078125l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm2.421753 1.046875l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm7.906372 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696655 0l0 -9.546875l1.171875 0l0 5.453125l2.765625 -2.8125l1.515625 0l-2.640625 2.5625l2.90625 
4.34375l-1.4375 0l-2.28125 -3.53125l-0.828125 0.796875l0 2.734375l-1.171875 0zm13.226379 0l-3.6875 -9.546875l1.359375 0l2.484375 6.9375q0.296875 0.828125 0.5 1.5625q0.21875 -0.78125 0.515625 -1.5625l2.578125 -6.9375l1.28125 0l-3.734375 9.546875l-1.296875 0zm10.013489 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm4.4696045 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.9298706 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.4923096 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 
0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm0.7030029 -2.40625q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6312256 3.453125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m786.1917 622.70026l0 -6.90625l1.046875 0l0 0.96875q0.328125 -0.515625 0.859375 
-0.8125q0.546875 -0.3125 1.234375 -0.3125q0.78125 0 1.265625 0.3125q0.484375 0.3125 0.6875 0.890625q0.828125 -1.203125 2.140625 -1.203125q1.03125 0 1.578125 0.578125q0.5625 0.5625 0.5625 1.734375l0 4.75l-1.171875 0l0 -4.359375q0 -0.703125 -0.125 -1.0q-0.109375 -0.3125 -0.40625 -0.5q-0.296875 -0.1875 -0.703125 -0.1875q-0.71875 0 -1.203125 0.484375q-0.484375 0.484375 -0.484375 1.546875l0 4.015625l-1.171875 0l0 -4.484375q0 -0.78125 -0.296875 -1.171875q-0.28125 -0.390625 -0.921875 -0.390625q-0.5 0 -0.921875 0.265625q-0.421875 0.25 -0.609375 0.75q-0.1875 0.5 -0.1875 1.453125l0 3.578125l-1.171875 0zm10.664917 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm11.131226 3.453125l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 -3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0zm11.365601 1.234375l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 
0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m672.6562 604.24146l48.91339 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m672.6562 604.24146l42.91339 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m715.5696 605.8932l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m885.3543 604.24146l125.921265 0l0 60.472473" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m885.3543 604.24146l125.921265 0l0 54.472473" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m1009.62384 658.7139l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#cfe2f3" d="m273.63647 679.8703l0 0c0 -6.2506714 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.0016785 0 5.8804016 1.1923828 8.002899 3.3149414c2.1224976 2.1224976 3.314911 5.0012207 3.314911 8.002869l0 45.269897c0 6.2506714 -5.067169 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m273.63647 679.8703l0 0c0 -6.2506714 5.067169 -11.31781 11.31781 -11.31781l141.14392 0c3.0016785 0 5.8804016 1.1923828 8.002899 3.3149414c2.1224976 2.1224976 3.314911 5.0012207 
3.314911 8.002869l0 45.269897c0 6.2506714 -5.067169 11.31781 -11.31781 11.31781l-141.14392 0c-6.250641 0 -11.31781 -5.0671387 -11.31781 -11.31781z" fill-rule="evenodd"/><path fill="#000000" d="m295.43872 707.30524l0 -9.546875l3.28125 0q1.109375 0 1.703125 0.140625q0.8125 0.1875 1.390625 0.6875q0.765625 0.640625 1.140625 1.640625q0.375 0.984375 0.375 2.25q0 1.09375 -0.265625 1.9375q-0.25 0.828125 -0.65625 1.390625q-0.390625 0.546875 -0.859375 0.859375q-0.46875 0.3125 -1.140625 0.484375q-0.65625 0.15625 -1.53125 0.15625l-3.4375 0zm1.265625 -1.125l2.03125 0q0.9375 0 1.46875 -0.171875q0.546875 -0.1875 0.875 -0.5q0.4375 -0.453125 0.6875 -1.203125q0.25 -0.765625 0.25 -1.84375q0 -1.5 -0.5 -2.296875q-0.484375 -0.8125 -1.1875 -1.078125q-0.5 -0.203125 -1.625 -0.203125l-2.0 0l0 7.296875zm12.718903 0.265625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm5.5531006 2.421875l0.171875 1.03125q-0.5 0.109375 
-0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.656128 0.1875q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm3.2718506 3.46875l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm3.4060974 0l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm2.539215 -3.453125q0 -1.921875 1.078125 
-2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm7.9281006 3.453125l-2.125 -6.90625l1.21875 0l1.09375 3.984375l0.421875 1.484375q0.015625 -0.109375 0.359375 -1.421875l1.09375 -4.046875l1.203125 0l1.03125 4.0l0.34375 1.328125l0.40625 -1.34375l1.171875 -3.984375l1.140625 0l-2.15625 6.90625l-1.21875 0l-1.09375 -4.140625l-0.265625 -1.171875l-1.40625 5.3125l-1.21875 0zm12.2031555 0l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 -0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm11.98349 4.140625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 
0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9749756 3.46875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm7.01651 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1560974 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm5.507965 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm1.1561279 -7.140625l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.507965 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 
-0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.4281006 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.945465 0l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm7.1937256 0.578125l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 
0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 -1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m273.6391 771.72046l0 0c0 -6.2504883 36.66214 -11.317566 81.887146 -11.317566c45.225006 0 81.887146 5.0670776 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66214 11.317627 -81.887146 11.317627c-45.225006 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m437.4134 771.72046l0 0c0 6.2505493 -36.66214 11.317627 -81.887146 11.317627c-45.225006 0 -81.887146 -5.0670776 -81.887146 -11.317627" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m273.6391 771.72046l0 0c0 -6.2504883 36.66214 -11.317566 81.887146 -11.317566c45.225006 0 81.887146 5.0670776 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66214 11.317627 -81.887146 11.317627c-45.225006 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m437.4134 771.72046l0 0c0 6.2505493 -36.66214 11.317627 -81.887146 11.317627c-45.225006 0 -81.887146 -5.0670776 -81.887146 -11.317627" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m273.6391 771.72046l0 0c0 -6.2504883 36.66214 -11.317566 81.887146 -11.317566c45.225006 0 81.887146 5.0670776 81.887146 11.317566l0 45.270325c0 6.2505493 -36.66214 11.317627 -81.887146 11.317627c-45.225006 0 -81.887146 -5.0670776 -81.887146 -11.317627z" fill-rule="evenodd"/><path fill="#000000" d="m318.0276 796.81445l0 -9.546875l3.59375 0q0.953125 0 1.453125 0.09375q0.703125 0.125 1.171875 0.453125q0.484375 0.328125 0.765625 0.921875q0.296875 0.59375 0.296875 1.296875q0 1.21875 
-0.78125 2.0625q-0.765625 0.84375 -2.796875 0.84375l-2.4375 0l0 3.875l-1.265625 0zm1.265625 -5.0l2.453125 0q1.234375 0 1.75 -0.453125q0.515625 -0.46875 0.515625 -1.28125q0 -0.609375 -0.3125 -1.03125q-0.296875 -0.421875 -0.796875 -0.5625q-0.3125 -0.09375 -1.171875 -0.09375l-2.4375 0l0 3.421875zm11.9835205 4.140625q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9749756 3.46875l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.1883545 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 
-0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5218506 4.125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm9.974976 -1.046875l0.171875 1.03125q-0.5 0.109375 -0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm4.6247253 1.625l1.140625 0.15625q0.078125 0.53125 0.40625 0.78125q0.4375 0.3125 1.1875 0.3125q0.8125 0 1.25 -0.328125q0.453125 -0.3125 0.609375 -0.90625q0.09375 -0.359375 0.078125 -1.5q-0.765625 0.90625 -1.90625 0.90625q-1.4375 0 -2.21875 -1.03125q-0.78125 -1.03125 -0.78125 -2.46875q0 -0.984375 0.359375 -1.8125q0.359375 -0.84375 1.03125 -1.296875q0.6875 -0.453125 1.609375 -0.453125q1.21875 0 2.015625 0.984375l0 -0.828125l1.078125 0l0 5.96875q0 1.609375 -0.328125 2.28125q-0.328125 0.6875 -1.046875 1.078125q-0.703125 0.390625 -1.75 0.390625q-1.234375 0 -2.0 -0.5625q-0.75 -0.5625 -0.734375 -1.671875zm0.984375 -4.15625q0 1.359375 0.53125 1.984375q0.546875 0.625 1.359375 0.625q0.796875 0 1.34375 -0.625q0.546875 -0.625 0.546875 -1.953125q0 -1.265625 -0.5625 
-1.90625q-0.5625 -0.640625 -1.359375 -0.640625q-0.765625 0 -1.3125 0.640625q-0.546875 0.625 -0.546875 1.875zm6.6312256 3.578125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm8.969635 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9906006 6.125l0 -9.5625l1.078125 0l0 0.890625q0.375 -0.53125 0.84375 -0.78125q0.484375 -0.265625 1.15625 -0.265625q0.875 0 1.546875 0.453125q0.6875 0.453125 1.03125 1.28125q0.34375 0.828125 0.34375 1.828125q0 1.046875 -0.375 1.90625q-0.375 0.84375 -1.109375 1.296875q-0.71875 0.453125 -1.53125 0.453125q-0.578125 0 -1.046875 -0.25q-0.46875 -0.25 -0.765625 -0.625l0 3.375l-1.171875 0zm1.0625 -6.078125q0 1.34375 
0.53125 1.984375q0.546875 0.625 1.3125 0.625q0.78125 0 1.34375 -0.65625q0.5625 -0.65625 0.5625 -2.046875q0 -1.3125 -0.546875 -1.96875q-0.546875 -0.671875 -1.296875 -0.671875q-0.75 0 -1.328125 0.703125q-0.578125 0.703125 -0.578125 2.03125zm6.3499756 3.421875l0 -9.546875l1.171875 0l0 3.421875q0.828125 -0.9375 2.078125 -0.9375q0.765625 0 1.328125 0.296875q0.5625 0.296875 0.8125 0.84375q0.25 0.53125 0.25 1.546875l0 4.375l-1.171875 0l0 -4.375q0 -0.890625 -0.390625 -1.28125q-0.375 -0.40625 -1.078125 -0.40625q-0.515625 0 -0.984375 0.28125q-0.453125 0.265625 -0.65625 0.734375q-0.1875 0.453125 -0.1875 1.265625l0 3.78125l-1.171875 0z" fill-rule="nonzero"/><path fill="#000000" d="m308.28354 815.62695q-0.984375 -1.234375 -1.65625 -2.875q-0.65625 -1.640625 -0.65625 -3.390625q0 -1.546875 0.5 -2.96875q0.578125 -1.640625 1.8125 -3.28125l0.828125 0q-0.78125 1.359375 -1.03125 1.9375q-0.40625 0.890625 -0.625 1.875q-0.28125 1.21875 -0.28125 2.4375q0 3.140625 1.9375 6.265625l-0.828125 0zm2.1883545 -2.8125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm6.9749756 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 
3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm6.9593506 -2.859375l0 -1.1875l3.59375 0l0 1.1875l-3.59375 0zm5.07901 2.859375l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531403 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858368 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 
-1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 
-1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852295 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 -0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625zm7.8984375 4.875l-0.828125 0q1.9375 -3.125 1.9375 -6.265625q0 -1.21875 -0.28125 -2.421875q-0.21875 -0.984375 -0.609375 -1.875q-0.265625 -0.59375 -1.046875 -1.953125l0.828125 0q1.234375 1.640625 1.8125 3.28125q0.5 1.421875 0.5 2.96875q0 1.75 -0.671875 3.390625q-0.671875 1.640625 -1.640625 2.875z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m50.93176 679.87006l0 0c0 -6.2504883 36.66212 -11.317566 81.88713 -11.317566c45.22502 0 81.887146 5.0670776 81.887146 
11.317566l0 45.270386c0 6.2504883 -36.662125 11.317566 -81.887146 11.317566c-45.225014 0 -81.88713 -5.0670776 -81.88713 -11.317566z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m214.70604 679.87006l0 0c0 6.2505493 -36.662125 11.317627 -81.887146 11.317627c-45.225014 0 -81.88713 -5.0670776 -81.88713 -11.317627" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m50.93176 679.87006l0 0c0 -6.2504883 36.66212 -11.317566 81.88713 -11.317566c45.22502 0 81.887146 5.0670776 81.887146 11.317566l0 45.270386c0 6.2504883 -36.662125 11.317566 -81.887146 11.317566c-45.225014 0 -81.88713 -5.0670776 -81.88713 -11.317566z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m214.70604 679.87006l0 0c0 6.2505493 -36.662125 11.317627 -81.887146 11.317627c-45.225014 0 -81.88713 -5.0670776 -81.88713 -11.317627" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m50.93176 679.87006l0 0c0 -6.2504883 36.66212 -11.317566 81.88713 -11.317566c45.22502 0 81.887146 5.0670776 81.887146 11.317566l0 45.270386c0 6.2504883 -36.662125 11.317566 -81.887146 11.317566c-45.225014 0 -81.88713 -5.0670776 -81.88713 -11.317566z" fill-rule="evenodd"/><path fill="#000000" d="m76.7617 704.96405l0 -9.546875l1.90625 0l2.25 6.765625q0.3125 0.9375 0.46875 1.40625q0.15625 -0.515625 0.5 -1.53125l2.28125 -6.640625l1.703125 0l0 9.546875l-1.21875 0l0 -7.984375l-2.765625 7.984375l-1.140625 0l-2.765625 -8.125l0 8.125l-1.21875 0zm11.008682 -8.1875l0 -1.359375l1.171875 0l0 1.359375l-1.171875 0zm0 8.1875l0 -6.90625l1.171875 0l0 6.90625l-1.171875 0zm2.164215 0l2.53125 -3.59375l-2.34375 -3.3125l1.46875 0l1.0625 1.609375q0.296875 0.46875 0.484375 0.78125q0.28125 -0.4375 0.515625 -0.765625l1.171875 -1.625l1.40625 0l-2.390625 3.25l2.5625 3.65625l-1.4375 0l-1.421875 -2.140625l-0.375 -0.59375l-1.8125 2.734375l-1.421875 0zm10.0078125 -1.046875l0.171875 1.03125q-0.5 0.109375 
-0.890625 0.109375q-0.640625 0 -1.0 -0.203125q-0.34375 -0.203125 -0.484375 -0.53125q-0.140625 -0.328125 -0.140625 -1.390625l0 -3.96875l-0.859375 0l0 -0.90625l0.859375 0l0 -1.71875l1.171875 -0.703125l0 2.421875l1.171875 0l0 0.90625l-1.171875 0l0 4.046875q0 0.5 0.046875 0.640625q0.0625 0.140625 0.203125 0.234375q0.140625 0.078125 0.40625 0.078125q0.203125 0 0.515625 -0.046875zm5.6717377 1.046875l0 -1.015625q-0.8125 1.171875 -2.1875 1.171875q-0.609375 0 -1.140625 -0.234375q-0.53125 -0.234375 -0.796875 -0.578125q-0.25 -0.359375 -0.359375 -0.875q-0.0625 -0.34375 -0.0625 -1.09375l0 -4.28125l1.171875 0l0 3.828125q0 0.921875 0.0625 1.234375q0.109375 0.46875 0.46875 0.734375q0.359375 0.25 0.890625 0.25q0.515625 0 0.984375 -0.265625q0.46875 -0.265625 0.65625 -0.734375q0.1875 -0.46875 0.1875 -1.34375l0 -3.703125l1.171875 0l0 6.90625l-1.046875 0zm2.8656006 0l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm9.18837 -2.21875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm9.787338 0.671875q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 
0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.9281006 3.453125l0 -6.0l-1.03125 0l0 -0.90625l1.03125 0l0 -0.734375q0 -0.703125 0.125 -1.046875q0.171875 -0.453125 0.59375 -0.734375q0.421875 -0.28125 1.203125 -0.28125q0.484375 0 1.09375 0.109375l-0.1875 1.03125q-0.359375 -0.0625 -0.6875 -0.0625q-0.53125 0 -0.75 0.234375q-0.21875 0.21875 -0.21875 0.84375l0 0.640625l1.34375 0l0 0.90625l-1.34375 0l0 6.0l-1.171875 0zm7.3122253 0l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 0zm9.531403 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 
0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm17.389618 2.203125q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9906006 3.46875l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875153 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.81251526 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm11.896866 0l0 -0.875q-0.65625 1.03125 -1.9375 1.03125q-0.8125 0 -1.515625 -0.453125q-0.6875 -0.453125 -1.078125 -1.265625q-0.375 -0.828125 -0.375 -1.890625q0 -1.03125 0.34375 -1.875q0.34375 -0.84375 1.03125 -1.28125q0.703125 -0.453125 1.546875 -0.453125q0.625 0 1.109375 0.265625q0.5 0.25 0.796875 0.671875l0 
-3.421875l1.171875 0l0 9.546875l-1.09375 0zm-3.703125 -3.453125q0 1.328125 0.5625 1.984375q0.5625 0.65625 1.328125 0.65625q0.765625 0 1.296875 -0.625q0.53125 -0.625 0.53125 -1.90625q0 -1.421875 -0.546875 -2.078125q-0.546875 -0.671875 -1.34375 -0.671875q-0.78125 0 -1.3125 0.640625q-0.515625 0.625 -0.515625 2.0z" fill-rule="nonzero"/><path fill="#000000" d="m87.76454 720.96405l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm6.9749756 -3.453125q0 -1.921875 1.078125 -2.84375q0.890625 -0.765625 2.171875 -0.765625q1.421875 0 2.328125 0.9375q0.90625 0.921875 0.90625 2.578125q0 1.328125 -0.40625 2.09375q-0.390625 0.765625 -1.15625 1.1875q-0.765625 0.421875 -1.671875 0.421875q-1.453125 0 -2.359375 -0.921875q-0.890625 -0.9375 -0.890625 -2.6875zm1.203125 0q0 1.328125 0.578125 1.984375q0.59375 0.65625 1.46875 0.65625q0.875 0 1.453125 -0.65625q0.578125 -0.671875 0.578125 -2.03125q0 -1.28125 -0.59375 -1.9375q-0.578125 -0.65625 -1.4375 -0.65625q-0.875 0 -1.46875 0.65625q-0.578125 0.65625 -0.578125 1.984375zm6.6468506 3.453125l0 -6.90625l1.0625 0l0 0.984375q0.75 -1.140625 2.1875 -1.140625q0.625 0 1.15625 0.21875q0.53125 0.21875 0.78125 0.59375q0.265625 0.359375 0.375 0.859375q0.0625 0.328125 0.0625 1.140625l0 4.25l-1.171875 0l0 -4.203125q0 -0.71875 -0.140625 -1.0625q-0.140625 -0.359375 -0.484375 -0.5625q-0.34375 -0.21875 -0.8125 -0.21875q-0.75 0 -1.296875 0.46875q-0.546875 0.46875 -0.546875 1.796875l0 3.78125l-1.171875 0zm6.9593506 -2.859375l0 -1.1875l3.59375 0l0 1.1875l-3.59375 0zm5.0789948 2.859375l0 -9.546875l1.265625 0l0 3.921875l4.953125 0l0 -3.921875l1.265625 0l0 9.546875l-1.265625 0l0 -4.5l-4.953125 0l0 4.5l-1.265625 
0zm9.531403 0l0 -9.546875l1.265625 0l0 8.421875l4.703125 0l0 1.125l-5.96875 0zm7.0374756 -3.0625l1.203125 -0.109375q0.078125 0.71875 0.390625 1.1875q0.3125 0.453125 0.953125 0.734375q0.65625 0.28125 1.46875 0.28125q0.71875 0 1.265625 -0.21875q0.5625 -0.21875 0.828125 -0.578125q0.265625 -0.375 0.265625 -0.828125q0 -0.453125 -0.265625 -0.78125q-0.25 -0.328125 -0.84375 -0.5625q-0.390625 -0.15625 -1.703125 -0.46875q-1.3125 -0.3125 -1.84375 -0.59375q-0.671875 -0.359375 -1.015625 -0.890625q-0.328125 -0.53125 -0.328125 -1.1875q0 -0.71875 0.40625 -1.34375q0.40625 -0.625 1.1875 -0.953125q0.796875 -0.328125 1.765625 -0.328125q1.046875 0 1.859375 0.34375q0.8125 0.34375 1.25 1.015625q0.4375 0.65625 0.46875 1.484375l-1.203125 0.09375q-0.109375 -0.90625 -0.671875 -1.359375q-0.5625 -0.46875 -1.65625 -0.46875q-1.140625 0 -1.671875 0.421875q-0.515625 0.421875 -0.515625 1.015625q0 0.515625 0.359375 0.84375q0.375 0.328125 1.90625 0.6875q1.546875 0.34375 2.109375 0.59375q0.84375 0.390625 1.234375 0.984375q0.390625 0.578125 0.390625 1.359375q0 0.75 -0.4375 1.4375q-0.421875 0.671875 -1.25 1.046875q-0.8125 0.359375 -1.828125 0.359375q-1.296875 0 -2.171875 -0.375q-0.875 -0.375 -1.375 -1.125q-0.5 -0.765625 -0.53125 -1.71875zm12.858368 3.0625l0 -9.546875l1.171875 0l0 9.546875l-1.171875 0zm7.49234 -0.859375q-0.65625 0.5625 -1.265625 0.796875q-0.59375 0.21875 -1.28125 0.21875q-1.140625 0 -1.75 -0.546875q-0.609375 -0.5625 -0.609375 -1.4375q0 -0.5 0.21875 -0.921875q0.234375 -0.421875 0.609375 -0.671875q0.375 -0.25 0.84375 -0.390625q0.34375 -0.078125 1.046875 -0.171875q1.421875 -0.171875 2.09375 -0.40625q0 -0.234375 0 -0.296875q0 -0.71875 -0.328125 -1.015625q-0.453125 -0.390625 -1.34375 -0.390625q-0.8125 0 -1.21875 0.296875q-0.390625 0.28125 -0.578125 1.015625l-1.140625 -0.15625q0.15625 -0.734375 0.515625 -1.1875q0.359375 -0.453125 1.03125 -0.6875q0.671875 -0.25 1.5625 -0.25q0.890625 0 1.4375 0.203125q0.5625 0.203125 0.8125 0.53125q0.265625 0.3125 0.375 0.796875q0.046875 0.296875 0.046875 
1.078125l0 1.5625q0 1.625 0.078125 2.0625q0.078125 0.4375 0.296875 0.828125l-1.21875 0q-0.1875 -0.359375 -0.234375 -0.859375zm-0.09375 -2.609375q-0.640625 0.265625 -1.921875 0.4375q-0.71875 0.109375 -1.015625 0.25q-0.296875 0.125 -0.46875 0.375q-0.15625 0.25 -0.15625 0.546875q0 0.46875 0.34375 0.78125q0.359375 0.3125 1.046875 0.3125q0.671875 0 1.203125 -0.296875q0.53125 -0.296875 0.78125 -0.8125q0.1875 -0.390625 0.1875 -1.171875l0 -0.421875zm2.9437256 6.125l-0.125 -1.09375q0.375 0.109375 0.65625 0.109375q0.390625 0 0.625 -0.140625q0.234375 -0.125 0.390625 -0.359375q0.109375 -0.171875 0.359375 -0.875q0.03125 -0.09375 0.109375 -0.28125l-2.625 -6.921875l1.265625 0l1.4375 4.0q0.28125 0.765625 0.5 1.59375q0.203125 -0.796875 0.46875 -1.578125l1.484375 -4.015625l1.171875 0l-2.625 7.015625q-0.421875 1.140625 -0.65625 1.578125q-0.3125 0.578125 -0.71875 0.84375q-0.40625 0.28125 -0.96875 0.28125q-0.328125 0 -0.75 -0.15625zm11.4453125 -4.875l1.203125 0.140625q-0.28125 1.0625 -1.0625 1.65625q-0.765625 0.578125 -1.96875 0.578125q-1.515625 0 -2.40625 -0.9375q-0.890625 -0.9375 -0.890625 -2.609375q0 -1.75 0.890625 -2.703125q0.90625 -0.96875 2.34375 -0.96875q1.390625 0 2.265625 0.9375q0.875 0.9375 0.875 2.65625q0 0.109375 0 0.3125l-5.15625 0q0.0625 1.140625 0.640625 1.75q0.578125 0.59375 1.4375 0.59375q0.65625 0 1.109375 -0.328125q0.453125 -0.34375 0.71875 -1.078125zm-3.84375 -1.90625l3.859375 0q-0.078125 -0.859375 -0.4375 -1.296875q-0.5625 -0.6875 -1.453125 -0.6875q-0.8125 0 -1.359375 0.546875q-0.546875 0.53125 -0.609375 1.4375zm6.5062256 4.125l0 -6.90625l1.0625 0l0 1.046875q0.40625 -0.734375 0.734375 -0.96875q0.34375 -0.234375 0.765625 -0.234375q0.59375 0 1.203125 0.375l-0.40625 1.078125q-0.4375 -0.25 -0.859375 -0.25q-0.390625 0 -0.703125 0.234375q-0.296875 0.234375 -0.421875 0.640625q-0.203125 0.625 -0.203125 1.359375l0 3.625l-1.171875 0zm3.9852448 -2.0625l1.15625 -0.1875q0.109375 0.703125 0.546875 1.078125q0.453125 0.359375 1.25 0.359375q0.8125 0 1.203125 -0.328125q0.390625 
-0.328125 0.390625 -0.765625q0 -0.390625 -0.359375 -0.625q-0.234375 -0.15625 -1.1875 -0.390625q-1.296875 -0.328125 -1.796875 -0.5625q-0.484375 -0.25 -0.75 -0.65625q-0.25 -0.421875 -0.25 -0.9375q0 -0.453125 0.203125 -0.84375q0.21875 -0.40625 0.578125 -0.671875q0.28125 -0.1875 0.75 -0.328125q0.46875 -0.140625 1.015625 -0.140625q0.8125 0 1.421875 0.234375q0.609375 0.234375 0.90625 0.640625q0.296875 0.390625 0.40625 1.0625l-1.140625 0.15625q-0.078125 -0.53125 -0.453125 -0.828125q-0.375 -0.3125 -1.0625 -0.3125q-0.8125 0 -1.15625 0.265625q-0.34375 0.265625 -0.34375 0.625q0 0.234375 0.140625 0.421875q0.15625 0.1875 0.453125 0.3125q0.171875 0.0625 1.03125 0.296875q1.25 0.328125 1.734375 0.546875q0.5 0.203125 0.78125 0.609375q0.28125 0.40625 0.28125 1.0q0 0.59375 -0.34375 1.109375q-0.34375 0.515625 -1.0 0.796875q-0.640625 0.28125 -1.453125 0.28125q-1.34375 0 -2.046875 -0.5625q-0.703125 -0.5625 -0.90625 -1.65625z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m132.81627 644.60895l0 23.937012" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m132.81627 644.60895l0 17.937012" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m131.16454 662.54596l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m214.70604 702.50525l58.929123 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m214.70604 702.50525l52.929123 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m267.63516 704.157l4.5381165 -1.6517334l-4.5381165 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m355.52625 736.458l0 23.937012" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m355.52625 736.458l0 17.937012" 
fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m353.8745 754.395l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m355.52625 668.5525l0 -23.937012" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m355.52625 668.5525l0 -17.937012" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m357.17798 650.6155l-1.6517334 -4.538086l-1.6517334 4.538086z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m585.88715 114.8399l0 29.663582l-111.887054 0l0 459.75372l34.87918 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="1.0,3.0" d="m585.88715 114.8399l0 29.663582l-111.887054 0l0 459.75372l28.87918 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m502.87927 605.90894l4.538086 -1.6517334l-4.538086 -1.6517334z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m437.4147 403.979l16.249512 0l0 383.59058l50.33316 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="1.0,3.0" d="m437.4147 403.979l16.249512 0l0 383.59058l50.33316 0" fill-rule="evenodd"/></g></svg> diff --git a/notebooks/end2end_example/finn-hw-arch.png b/notebooks/end2end_example/finn-hw-arch.png new file mode 100755 index 0000000000000000000000000000000000000000..e5631ab97d0d6bdce91aea4b916a7c7a2780560d Binary files /dev/null and b/notebooks/end2end_example/finn-hw-arch.png differ diff --git a/notebooks/end2end_example/pynq_shell_project.png b/notebooks/end2end_example/pynq_shell_project.png new file mode 100644 index 0000000000000000000000000000000000000000..b1f3c5e4f6231ca692fddc2e6a1e14cdea49dc20 Binary files /dev/null and 
b/notebooks/end2end_example/pynq_shell_project.png differ diff --git a/notebooks/end2end_example/stitched_ip.png b/notebooks/end2end_example/stitched_ip.png new file mode 100644 index 0000000000000000000000000000000000000000..64b96d3451f33ebad99befcf903aba9c57052f79 Binary files /dev/null and b/notebooks/end2end_example/stitched_ip.png differ diff --git a/notebooks/end2end_example/tfc_end2end_example.ipynb b/notebooks/end2end_example/tfc_end2end_example.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..6fad8193c10a8ac8d2857f6c89b93f292ebb0a9c --- /dev/null +++ b/notebooks/end2end_example/tfc_end2end_example.ipynb @@ -0,0 +1,1762 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# FINN - End-to-End Flow\n", + "-----------------------------------------------------------------\n", + "\n", + "In this notebook, we will show how to take a simple, binarized, fully-connected network trained on the MNIST data set and take it all the way down to a customized bitfile running on a PYNQ board. \n", + "\n", + "This notebook is quite lengthy, and some of the cells (involving Vivado synthesis) may take up to an hour to finish running. To let you save and resume your progress, we will save the intermediate ONNX models that are generated in the various steps to disk, so that you can jump back directly to where you left off.\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Overview\n", + "\n", + "The FINN compiler comes with many *transformations* that modify the ONNX representation of the network according to certain patterns. This notebook will demonstrate a *possible* sequence of such transformations to take a particular trained network all the way down to hardware, as shown in the figure below." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The cylinder-like fields show the state of the network representation in the respective step. The rectangular fields represent the transformations that are applied to the network to achieve a certain result. The diagram is divided into 5 blocks, each of it includes several flow steps. The flow starts in top left corner with Brevitas export (purple block), followed by the preparation of the network (grey block) for the Vivado HLS synthesis and Vivado IPI stitching (yellow block), and finally building a PYNQ overlay bitfile and testing it on a PYNQ board (pink block).\n", + "There is an additional section for functional verification (green block), which we will not cover in this notebook.\n", + "\n", + "\n", + "This Jupyter notebook is organized based on the sections described above. We will use the following helper functions, `showSrc` to show source code of FINN library calls and `showInNetron` to show the ONNX model at the current transformation step. The Netron displays are interactive, but they only work when running the notebook actively and not on GitHub (i.e. if you are viewing this on GitHub you'll only see blank squares)." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "import inspect\n", + "import netron\n", + "from finn.util.basic import make_build_dir\n", + "from IPython.display import IFrame\n", + "\n", + "def showSrc(what):\n", + " print(\"\".join(inspect.getsourcelines(what)[0]))\n", + " \n", + "def showInNetron(model_filename):\n", + " netron.start(model_filename, port=8081, host=\"0.0.0.0\")\n", + " return IFrame(src=\"http://0.0.0.0:8081/\", width=\"100%\", height=400)\n", + " \n", + "build_dir = \"/workspace/finn\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Outline\n", + "-------------\n", + "1. 
[Brevitas export](#brev_exp)\n", + "2. [Network preparation](#nw_prep)\n", + " * Basic transformations\n", + " * Streamlining\n", + " * Conversion to HLS layers\n", + " * Folding\n", + "3. [Vivado HLS and Vivado IPI](#vivado)\n", + " * HLS IP per layer\n", + " * Creation of stitched design\n", + "4. [Synthesize, Deploy and Test on PYNQ](#hw_test)\n", + " * PYNQ shell project\n", + " * Synthesis, place and route\n", + " * Driver generation\n", + " * Deployment and remote execution" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Brevitas export <a id='brev_exp'></a>\n", + "FINN expects an ONNX model as input. This can be a model trained with [Brevitas](https://github.com/Xilinx/brevitas). Brevitas is a PyTorch library for quantization-aware training and the FINN Docker image comes with several [example Brevitas networks](https://github.com/maltanar/brevitas_cnv_lfc). To show the FINN end-to-end flow, we'll use the TFC-w1a1 model as example network.\n", + "\n", + "First a few things have to be imported. Then the model can be loaded with the pretrained weights." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "import onnx\n", + "from finn.util.test import get_test_model_trained\n", + "import brevitas.onnx as bo\n", + "\n", + "tfc = get_test_model_trained(\"TFC\", 1, 1)\n", + "bo.export_finn_onnx(tfc, (1, 1, 28, 28), build_dir+\"/tfc_w1_a1.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The model was now exported, loaded with the pretrained weights and saved under the name \"lfc_w1_a1.onnx\".\n", + "To visualize the exported model, Netron can be used. Netron is a visualizer for neural networks and allows interactive investigation of network properties. For example, you can click on the individual nodes and view the properties." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/workspace/finn/tfc_w1_a1.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f186ccfbe10>" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "showInNetron(build_dir+\"/tfc_w1_a1.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we have the model in .onnx format, we can work with it using FINN. For that FINN `ModelWrapper` is used. It is a wrapper around the ONNX model which provides several helper functions to make it easier to work with the model." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.core.modelwrapper import ModelWrapper\n", + "model = ModelWrapper(build_dir+\"/tfc_w1_a1.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now the model is prepared and could be simulated using Python. How this works is described in subsection [Simulation using Python](#simpy) in the section about *Simulation & Emulation flows for functional verification*.\n", + "\n", + "The model can now also be processed in different ways. The principle of FINN are analysis and transformation passes, which can be applied to the model. An analysis pass extracts specific information about the model and returns it to the user in the form of a dictionary. 
A transformation pass changes the model and returns the changed model back to the FINN flow.\n", + "\n", + "Since the goal in this notebook is to process the model to such an extent that a bitstream can be generated from it, the focus is on the transformations that are necessary for this. In the next section these are discussed in more detail." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Network preparation <a id='nw_prep'></a>\n", + "\n", + "* [Tidy-up transformations](#basic_trafo)\n", + "* [Streamlining](#streamline)\n", + "* [Conversion to HLS layers](#hls_layers)\n", + "* [Folding](#folding)\n", + "\n", + "\n", + "In this section, we will put the network through a series of transformations that puts it in a form that can be stitched together to form a FINN-style dataflow architecture, yielding a high-performance, high-efficiency FPGA accelerator." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### FINN-style Dataflow Architectures\n", + "\n", + "We start with a quick recap of FINN-style dataflow architectures. The key idea in such architectures is to parallelize across layers as well as within layers by dedicating a proportionate amount of compute resources to each layer, as illustrated in the figure below taken from the [FINN-R paper](https://arxiv.org/pdf/1809.04570.pdf):\n", + "\n", + "\n", + "\n", + "In practice, the compute arrays are instantiated by function calls to optimized Vivado HLS building blocks from the [finn-hlslib](https://github.com/Xilinx/finn-hlslib) library. As these function calls can only handle certain patterns/cases, we need to transform the network into an appropriate form so that we can replace network layers with these function calls, which is the goal of the network preparation process." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tidy-up transformations <a id='basic_trafo'></a>\n", + "This section deals with some basic transformations, which are applied to the model like a kind of \"tidy-up\" to make it easier to be processed. They do not appear in the diagram above, but they are applied in many steps in the FINN flow to postprocess the model after a transformation and/or to prepare it for the next transformation." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "These transformations are:\n", + "* GiveUniqueNodeNames\n", + "* GiveReadableTensorNames\n", + "* InferShapes\n", + "* InferDataTypes\n", + "* FoldConstants" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the first two transformations (`GiveUniqueNodeNames`, `GiveReadableTensorNames`) the nodes in the graph are first given unique (by enumeration) names, then the tensors are given human-readable names (based on the node names). The following two transformations (`InferShapes`, `InferDataTypes`) derive the shapes and data types of the tensors from the model properties and set them in the `ValueInfo` of the model. These transformations can almost always be applied without negative effects and do not affect the structure of the graph, ensuring that all the information needed is available.\n", + "\n", + "The last listed transformation is `FoldConstants`, which performs constant folding. It identifies a node with constant inputs and determines its output. The result is then set as constant-only inputs for the following node and the old node is removed. Although this transformation changes the structure of the model, it is a transformation that is usually always desired and can be applied to any model." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "These transformations can be imported and applied as follows." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames\n", + "from finn.transformation.infer_shapes import InferShapes\n", + "from finn.transformation.infer_datatypes import InferDataTypes\n", + "from finn.transformation.fold_constants import FoldConstants\n", + "\n", + "model = model.transform(InferShapes())\n", + "model = model.transform(FoldConstants())\n", + "model = model.transform(GiveUniqueNodeNames())\n", + "model = model.transform(GiveReadableTensorNames())\n", + "model = model.transform(InferDataTypes())\n", + "\n", + "model.save(build_dir+\"/tfc_w1_a1_tidy.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The result of these transformations can be viewed with netron after the model has been saved again. By clicking on the individual nodes, it can now be seen, for example, that each node has been given a name. Also the whole upper area could be folded, so that now the first node is \"Reshape\"." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/workspace/finn/tfc_w1_a1_tidy.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f186e386240>" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "showInNetron(build_dir+\"/tfc_w1_a1_tidy.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Streamlining <a id='streamline'></a>\n", + "Streamlining is a transformation containing several sub-transformations. The goal of streamlining is to eliminate floating point operations by moving them around, then collapsing them into one operation and in the last step transform them into multi-thresholding nodes. 
For more information on the theoretical background of this, see [this paper](https://arxiv.org/pdf/1709.04060).\n", + "\n", + "Let's have a look at which sub-transformations `Streamline` consists of:" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "class Streamline(Transformation):\n", + " \"\"\"Apply the streamlining transform, see arXiv:1709.04060.\"\"\"\n", + "\n", + " def apply(self, model):\n", + " streamline_transformations = [\n", + " ConvertSubToAdd(),\n", + " BatchNormToAffine(),\n", + " ConvertSignToThres(),\n", + " MoveAddPastMul(),\n", + " MoveScalarAddPastMatMul(),\n", + " MoveScalarMulPastMatMul(),\n", + " MoveAddPastMul(),\n", + " CollapseRepeatedAdd(),\n", + " CollapseRepeatedMul(),\n", + " AbsorbAddIntoMultiThreshold(),\n", + " FactorOutMulSignMagnitude(),\n", + " AbsorbMulIntoMultiThreshold(),\n", + " Absorb1BitMulIntoMatMul(),\n", + " RoundAndClipThresholds(),\n", + " ]\n", + " for trn in streamline_transformations:\n", + " model = model.transform(trn)\n", + " model = model.transform(GiveUniqueNodeNames())\n", + " model = model.transform(GiveReadableTensorNames())\n", + " model = model.transform(InferDataTypes())\n", + " return (model, False)\n", + "\n" + ] + } + ], + "source": [ + "from finn.transformation.streamline import Streamline\n", + "showSrc(Streamline)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As can be seen, several transformations are involved in the streamlining transformation. There are move and collapse transformations. In the last step the operations are transformed into multithresholds. The involved transformations can be viewed in detail [here](https://github.com/Xilinx/finn/tree/dev/src/finn/transformation/streamline). 
After each transformation, three of the tidy-up transformations (`GiveUniqueNodeNames`, `GiveReadableTensorNames` and `InferDataTypes`) are applied to the model.\n", + "\n", + "After streamlining the network looks as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/workspace/finn/tfc_w1_a1_streamlined.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f186cd470b8>" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model = ModelWrapper(build_dir+\"/tfc_w1_a1_tidy.onnx\")\n", + "model = model.transform(Streamline())\n", + "model.save(build_dir+\"/tfc_w1_a1_streamlined.onnx\")\n", + "showInNetron(build_dir+\"/tfc_w1_a1_streamlined.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can see that the network has become simplified considerably compared to the previous step -- a lot of nodes have disappeared between the `MatMul` layers, and the `Sign` nodes have been replaced with `MultiThreshold` nodes instead. \n", + "\n", + "**The current implementation of streamlining is highly network-specific and may not work for your network if its topology is very different than the example network here. We hope to rectify this in future releases.**\n", + "\n", + "Our example network is a quantized network with 1-bit bipolar (-1, +1 values) precision, and we want FINN to implement them as XNOR-popcount operations [as described in the original FINN paper](https://arxiv.org/pdf/1612.07119). 
For this reason, after streamlining, the resulting bipolar matrix multiplications are converted into xnorpopcount operations. This transformation produces operations that are again collapsed and converted into thresholds. This procedure is shown below. " + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/workspace/finn/tfc_w1a1_ready_for_hls_conversion.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f17f04bbc18>" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount\n", + "import finn.transformation.streamline.absorb as absorb\n", + "from finn.transformation.streamline.round_thresholds import RoundAndClipThresholds\n", + "\n", + "model = model.transform(ConvertBipolarMatMulToXnorPopcount())\n", + "model = model.transform(absorb.AbsorbAddIntoMultiThreshold())\n", + "model = model.transform(absorb.AbsorbMulIntoMultiThreshold())\n", + "model = model.transform(RoundAndClipThresholds())\n", + "\n", + "model.save(build_dir+\"/tfc_w1a1_ready_for_hls_conversion.onnx\")\n", + "showInNetron(build_dir+\"/tfc_w1a1_ready_for_hls_conversion.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Observe the pairs of `XnorPopcountmatMul` and `MultiThreshold` layers following each other -- this is the particular pattern that the next step will be looking for in order to convert them to HLS layers." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Conversion to HLS layers <a id='hls_layers'></a>\n", + "Converts the nodes to HLS layers that correspond to the functions in [finn-hls library](https://finn-hlslib.readthedocs.io/en/latest/). In our case this transformation converts pairs of binary XnorPopcountMatMul layers to StreamingFCLayer_Batch layers. Any immediately following MultiThreshold layers will also be absorbed into the MVTU.\n", + "\n", + "Below is the code for the transformation and the network is visualized using netron to create the new structure with `StreamingFCLayer_Batch` nodes, which will correspond to a function call from the [finn-hlslib](https://finn-hlslib.readthedocs.io/en/latest/library/fclayer.html#_CPPv4I_j_j_j_j000_i_i000E22StreamingFCLayer_BatchvRN3hls6streamI7ap_uintI9InStreamWEEERN3hls6streamI7ap_uintI10OutStreamWEEERK2TWRK2TAKjRK1R) library." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/workspace/finn/tfc_w1_a1_hls_layers.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f1868061eb8>" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls\n", + "model = ModelWrapper(build_dir+\"/tfc_w1a1_ready_for_hls_conversion.onnx\")\n", + "model = model.transform(to_hls.InferBinaryStreamingFCLayer())\n", + "model.save(build_dir+\"/tfc_w1_a1_hls_layers.onnx\")\n", + "showInNetron(build_dir+\"/tfc_w1_a1_hls_layers.onnx\")" + ] + }, + { + 
"cell_type": "markdown", + "metadata": {}, + "source": [ + "Each StreamingFCLayer_Batch node has two attributes that specify the degree of folding, PE and SIMD. In all nodes the values for these attributes are set as default to 1, which would correspond to a maximum folding (time multiplexing) and thus minimum performance. We will shortly cover how these can be adjusted, but first we want to separate the HLS layers from the non-HLS layers in this network." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Creating a Dataflow Partition <a id='dataflow_partition'></a>\n", + "\n", + "In the graph above, you can see that there is a mixture of FINN HLS layers (StreamingFCLayer_Batch) with regular ONNX layers (Reshape, Mul, Add). To create a bitstream, FINN needs a model with only HLS layers. In order to achieve this, we will use the `CreateDataflowPartition` transformation to create a \"dataflow partition\" in this graph, separating out the HLS layers into another model, and replacing them with a placeholder layer called StreamingDataflowPartition:" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/workspace/finn/tfc_w1_a1_dataflow_parent.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f186cc55e48>" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from finn.transformation.fpgadataflow.create_dataflow_partition import CreateDataflowPartition\n", + "\n", + "model = ModelWrapper(build_dir+\"/tfc_w1_a1_hls_layers.onnx\")\n", + "parent_model = 
model.transform(CreateDataflowPartition())\n", + "parent_model.save(build_dir+\"/tfc_w1_a1_dataflow_parent.onnx\")\n", + "showInNetron(build_dir+\"/tfc_w1_a1_dataflow_parent.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can see that the StreamingFCLayer instances have all been replaced with a single `StreamingDataflowPartition`, which has an attribute `model` that points to the extracted, HLS dataflow-only graph:" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/tmp/finn_maltanar/dataflow_partition_h1c4i5gn/df_model.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f17f04c70f0>" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from finn.custom_op.registry import getCustomOp\n", + "sdp_node = getCustomOp(parent_model.graph.node[2])\n", + "dataflow_model_filename = sdp_node.get_nodeattr(\"model\")\n", + "showInNetron(dataflow_model_filename)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can see all the extracted `StreamingFCLayer` instances have been moved to the child (dataflow) model. We will load the child model with `ModelWrapper` and continue working on it." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "model = ModelWrapper(dataflow_model_filename)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Folding and TLastMarker Insertion <a id='folding'></a>\n", + "\n", + "*Folding* in FINN describes how much a layer is time-multiplexed in terms of execution resources. There are several *folding factors* for each layer, controlled by the PE (parallelization over outputs) and SIMD (parallelization over inputs) parameters as described by the original [FINN paper](https://arxiv.org/pdf/1612.07119). The higher the PE and SIMD values are set, the faster the generated accelerator will run, and the more FPGA resources it will consume. \n", + "\n", + "Since the folding parameters are node attributes, they can be easily accessed and changed using a helper function of the `ModelWrapper`. But first we have to extract the nodes which are StreamingFCLayer_Batch operations. This is where the Netron visualization helps us, in the above diagram we can see that the first four nodes are StreamingFCLayer_Batch. Through the `print`s we can check if the extracted nodes all have the op_type \"StreamingFCLayer_Batch\"." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "fc0 has the op_type: StreamingFCLayer_Batch\n", + "fc1 has the op_type: StreamingFCLayer_Batch\n", + "fc2 has the op_type: StreamingFCLayer_Batch\n", + "fc3 has the op_type: StreamingFCLayer_Batch\n" + ] + } + ], + "source": [ + "fc0 = model.graph.node[0]\n", + "fc1 = model.graph.node[1]\n", + "fc2 = model.graph.node[2]\n", + "fc3 = model.graph.node[3]\n", + "print(\"fc0 has the op_type: \" + str(fc0.op_type))\n", + "print(\"fc1 has the op_type: \" + str(fc1.op_type))\n", + "print(\"fc2 has the op_type: \" + str(fc2.op_type))\n", + "print(\"fc3 has the op_type: \" + str(fc3.op_type))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can use the higher-level [HLSCustomOp](https://github.com/Xilinx/finn/blob/master/src/finn/custom_op/fpgadataflow/__init__.py) wrappers for these nodes. These wrappers provide easy access to specific properties of these nodes, such as the folding factors (PE and SIMD). Let's have a look at which node attributes are defined by the CustomOp wrapper, and adjust the SIMD and PE attributes." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CustomOp wrapper is of class StreamingFCLayer_Batch\n" + ] + }, + { + "data": { + "text/plain": [ + "{'PE': ('i', True, 0),\n", + " 'SIMD': ('i', True, 0),\n", + " 'MW': ('i', True, 0),\n", + " 'MH': ('i', True, 0),\n", + " 'resType': ('s', True, ''),\n", + " 'ActVal': ('i', False, 0),\n", + " 'inputDataType': ('s', True, ''),\n", + " 'weightDataType': ('s', True, ''),\n", + " 'outputDataType': ('s', True, ''),\n", + " 'binaryXnorMode': ('i', False, 0),\n", + " 'noActivation': ('i', False, 0),\n", + " 'inFIFODepth': ('i', False, 0),\n", + " 'outFIFODepth': ('i', False, 0),\n", + " 'backend': ('s', True, 'fpgadataflow'),\n", + " 'code_gen_dir_npysim': ('s', False, ''),\n", + " 'code_gen_dir_ipgen': ('s', False, ''),\n", + " 'executable_path': ('s', False, ''),\n", + " 'ipgen_path': ('s', False, ''),\n", + " 'exec_mode': ('s', False, ''),\n", + " 'sim_cycles': ('i', False, 0),\n", + " 'rtlsim_trace': ('s', False, '')}" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "fc0w = getCustomOp(fc0)\n", + "fc1w = getCustomOp(fc1)\n", + "fc2w = getCustomOp(fc2)\n", + "fc3w = getCustomOp(fc3)\n", + "\n", + "print(\"CustomOp wrapper is of class \" + fc0w.__class__.__name__)\n", + "\n", + "fc0w.get_nodeattr_types()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can see that the PE and SIMD are listed as node attributes, as well as the depths of the FIFOs that will be inserted between consecutive layers, and all can be adjusted using `set_nodeattr` subject to certain constraints.\n", + "**In this notebook we are setting the folding factors and FIFO depths manually, but in a future version we will support determining the folding factors given an FPGA resource budget according to the analytical model from the [FINN-R 
paper](https://arxiv.org/pdf/1809.04570).**" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "# SIMD controls the folding over the input vector\n", + "# PE controls the folding over the output vector\n", + "\n", + "fc0w.set_nodeattr(\"inFIFODepth\", 50)\n", + "fc0w.set_nodeattr(\"SIMD\", 16)\n", + "fc0w.set_nodeattr(\"PE\", 16)\n", + "fc0w.set_nodeattr(\"outFIFODepth\", 4)\n", + "\n", + "fc1w.set_nodeattr(\"SIMD\", 16)\n", + "fc1w.set_nodeattr(\"PE\", 16)\n", + "fc1w.set_nodeattr(\"outFIFODepth\", 4)\n", + "\n", + "fc2w.set_nodeattr(\"SIMD\", 16)\n", + "fc2w.set_nodeattr(\"PE\", 16)\n", + "fc2w.set_nodeattr(\"outFIFODepth\", 4)\n", + "\n", + "fc3w.set_nodeattr(\"SIMD\", 16)\n", + "fc3w.set_nodeattr(\"PE\", 10)\n", + "fc3w.set_nodeattr(\"outFIFODepth\", 50)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we will run the `InsertTLastMarker` transformation to get a `TLastMarker` node at the output of this graph, which is necessary to run the DMA engines correctly. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/workspace/finn/tfc_w1_a1_set_folding_factors.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f1868061d30>" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker\n", + "model = model.transform(InsertTLastMarker())\n", + "model.save(build_dir+\"/tfc_w1_a1_set_folding_factors.onnx\")\n", + "showInNetron(build_dir+\"/tfc_w1_a1_set_folding_factors.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This completes the network preparation and the network can be passed on to the next block *Vivado HLS and Vivado synthesis*, which is described below." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. Vivado HLS and Vivado IPI <a id='vivado'></a>\n", + "* [Generating HLS Code](#hls_per_layer)\n", + "* [Synthesizing HLS to IP Blocks](#hls_synth)\n", + "* [IP Stitching](#ip_stitching)\n", + "\n", + "As we will be dealing with FPGA synthesis tools in these tasks, we'll define two helper variables that describe the Xilinx FPGA part name and the PYNQ board name that we are targeting." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['Ultra96', 'Pynq-Z1'])\n" + ] + } + ], + "source": [ + "# print the names of the supported PYNQ boards\n", + "from finn.util.basic import pynq_part_map\n", + "print(pynq_part_map.keys())" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "# change this if you have a different PYNQ board, see list above\n", + "pynq_board = \"Ultra96\"\n", + "fpga_part = pynq_part_map[pynq_board]\n", + "target_clk_ns = 5" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Generating HLS Code <a id='hls_per_layer'></a>\n", + "This section deals with the generation of an IP block from the different layers. These can then be stitched to a block design that corresponds to the complete model. The single conversion into IP blocks allows a good transparency and we can check the functionality of each IP block and compare it with the behaviour of the corresponding ONNX node. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Two transformations are required to generate HLS IP blocks for each layer: \n", + "* `CodeGen_ipgen` which generates the HLS C++ code for the node and a tcl-script which starts the HLS synthesis and exports the design as IP. \n", + "* `HLSSynth_IPGen` which passes the tcl-script to Vivado HLS and thus performs the actual IP generation. \n", + "\n", + "We start off by giving unique node names using the basic transformation `GiveUniqueNodeNames`, and then proceed with the HLS C++ code generation with `CodeGen_ipgen`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "model = ModelWrapper(build_dir+\"/tfc_w1_a1_set_folding_factors.onnx\")\n", + "model = model.transform(GiveUniqueNodeNames())\n", + "\n", + "from finn.transformation.fpgadataflow.codegen_ipgen import CodeGen_ipgen\n", + "model = model.transform(CodeGen_ipgen(fpga_part, target_clk_ns))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Synthesizing HLS to IP Blocks <a id='hls_synth'></a>\n", + "\n", + "Now that we have generated the HLS code for each layer, we can call the `HLSSynth_IPGen` transformation to convert the generated HLS into Vivado IP blocks. **As this involves calling HLS synthesis, this transformation will run for some time (several minutes).**" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.fpgadataflow.hlssynth_ipgen import HLSSynth_IPGen\n", + "\n", + "model = model.transform(HLSSynth_IPGen())\n", + "model.save(build_dir+\"/tfc_w1_a1_ipgen.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Each `StreamingFCLayer_Batch` node now has new attributes which can be examined more closely with netron." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving '/workspace/finn/tfc_w1_a1_ipgen.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7f17f04c9470>" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "showInNetron(build_dir+\"/tfc_w1_a1_ipgen.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "There are two additional attributes: \n", + "* `code_gen_dir_ipgen` which contains the directory path where all the files generated by the ipgen transformations are stored\n", + "* `ipgen_path` which contains the path to the project directory in which the generated IP block is stored\n", + "\n", + "We can further investigate which files are produced by taking a look in this directory. For example for the first StreamingFCLayer_Batch node." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "hls_syn_StreamingFCLayer_Batch_0.tcl thresh.h\r\n", + "ipgen.sh\t\t\t top_StreamingFCLayer_Batch_0.cpp\r\n", + "params.h\t\t\t vivado_hls.log\r\n", + "project_StreamingFCLayer_Batch_0\r\n" + ] + } + ], + "source": [ + "fc0w = getCustomOp(model.graph.node[0])\n", + "code_gen_dir = fc0w.get_nodeattr(\"code_gen_dir_ipgen\")\n", + "!ls {code_gen_dir}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Directory *project_StreamingFCLayer_Batch_0* contains the project created by Vivado HLS into which the IP Block is exported, along with other files generated by Vivado HLS. If we compare it to the above visualization of the network with netron, this is exactly the name of the folder stored in the node attribute `ipgen_path`. The .cpp code that is passed to Vivado HLS can be found in the file *top_StreamingFCLayer_Batch_0.cpp*. The files *params.h* and *thresh.h* belong to that as well, they contain the values for the weights and thresholds. *vivado_hls.log* is the log file from Vivado HLS. Besides these files, the folder contains *ipgen.sh* and *hls_syn_StreamingFCLayer_Batch_0.tcl*. First we take a look at *ipgen.sh*." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "#!/bin/bash \r\n", + "cd /tmp/finn_maltanar/code_gen_ipgen_StreamingFCLayer_Batch_5f0hmok_\r\n", + "vivado_hls /tmp/finn_maltanar/code_gen_ipgen_StreamingFCLayer_Batch_5f0hmok_/hls_syn_StreamingFCLayer_Batch_0.tcl\r\n", + "cd /workspace/finn\r\n" + ] + } + ], + "source": [ + "shell_script = code_gen_dir + \"/ipgen.sh\"\n", + "!cat {shell_script}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The script consists only of two framing `cd` commands and a command to pass the tcl script to *vivado_hls*. The directory has to be changed to create the files in the correct folder and will then be changed back to the original directory. \n", + "\n", + "Below is the tcl script which is passed to *vivado_hls*." + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\r\n", + "set config_proj_name project_StreamingFCLayer_Batch_0\r\n", + "puts \"HLS project: $config_proj_name\"\r\n", + "set config_hwsrcdir \"/tmp/finn_maltanar/code_gen_ipgen_StreamingFCLayer_Batch_5f0hmok_\"\r\n", + "puts \"HW source dir: $config_hwsrcdir\"\r\n", + "set config_proj_part \"xczu3eg-sbva484-1-e\"\r\n", + "\r\n", + "set config_bnnlibdir \"/workspace/finn-hlslib\"\r\n", + "\r\n", + "set config_toplevelfxn \"StreamingFCLayer_Batch_0\"\r\n", + "set config_clkperiod 5\r\n", + "\r\n", + "open_project $config_proj_name\r\n", + "add_files $config_hwsrcdir/top_StreamingFCLayer_Batch_0.cpp -cflags \"-std=c++0x -I$config_bnnlibdir\"\r\n", + "\r\n", + "set_top $config_toplevelfxn\r\n", + "open_solution sol1\r\n", + "set_part $config_proj_part\r\n", + "\r\n", + "config_interface -m_axi_addr64\r\n", + "config_rtl -auto_prefix\r\n", + "\r\n", + "create_clock -period $config_clkperiod -name default\r\n", + "csynth_design\r\n", 
+ "export_design -format ip_catalog\r\n", + "exit 0\r\n" + ] + } + ], + "source": [ + "tcl_script = code_gen_dir + \"/hls_syn_StreamingFCLayer_Batch_0.tcl\"\n", + "!cat {tcl_script}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the first part of the script the project is configured. For example the FPGA part and the clock are set. Then the project is opened and the files are added. The toplevel function is set and after creating a clock, the design is first synthesized with `csynth` and then exported as an IP block.\n", + "\n", + "Now that all IP blocks are in place, they can be stitched together to create an IP design that matches the ONNX model. This is covered in the next section." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### IP Stitching <a id='ip_stitching'></a>\n", + "\n", + "We now have IP blocks for each of our layers, and will stitch them together into a larger IP that implements the whole network using the `CodeGen_ipstitch` transformation. Bear in mind that this transformation can only be applied on a graph that only contains HLS nodes that already have been through the `HLSSynth_IPGen` transformation, which is the last step we performed. Prior to calling IP stitching, we'll also use the `ReplaceVerilogRelPaths` transformation to convert any relative `$readmemh` paths in the generated IP blocks to absolute ones, which prevents errors later on. 
**This step invokes Vivado and may take a few minutes to run.**" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.fpgadataflow.codegen_ipstitch import CodeGen_ipstitch\n", + "from finn.transformation.fpgadataflow.replace_verilog_relpaths import ReplaceVerilogRelPaths\n", + "model = ModelWrapper(build_dir+\"/tfc_w1_a1_ipgen.onnx\")\n", + "model = model.transform(ReplaceVerilogRelPaths())\n", + "model = model.transform(CodeGen_ipstitch(fpga_part))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If you examine the nodes themselves on the transformed model you won't see a difference, because the IP stitching adds model-level metadata to the graph. This can be accessed using the `.model.metadata_props`, the `get_metadata_prop` function in `ModelWrapper`, or by clicking on the global input/output tensors in Netron." + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[key: \"vivado_stitch_proj\"\n", + "value: \"/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo\"\n", + ", key: \"vivado_stitch_vlnv\"\n", + "value: \"xilinx_finn:finn:finn_design:1.0\"\n", + ", key: \"wrapper_filename\"\n", + "value: \"/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo/finn_vivado_stitch_proj.srcs/sources_1/bd/finn_design/hdl/finn_design_wrapper.v\"\n", + "]" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.model.metadata_props" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo'" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.get_metadata_prop(\"vivado_stitch_proj\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + 
"If you navigate to the folder above (remember the /tmp/finn_xxx folder is mounted on the host as well as inside Docker) you can open the Vivado project (.xpr) file there using Vivado, and view the following stitched IP block design:" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "model.save(build_dir+\"/tfc_w1_a1_ipstitch.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "At this point, one could take the generated stitched IP and integrate it into your own project using Vivado IP Integrator if desired. Here, we will continue the tutorial by assuming that we want to do a stand-alone deployment for this accelerator for a PYNQ board." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 4. Synthesize, Deploy and Test on PYNQ <a id='hw_test'></a>\n", + "\n", + "* [Inserting the IP into a PYNQ Overlay Shell](#pynq_shell)\n", + "* [Synthesis, place and route](#synth_pl_ro)\n", + "* [Driver Generation](#driver_gen)\n", + "* [Deployment and Remote Execution](#deploy)\n", + "\n", + "\n", + "We are almost done preparing our hardware design. We'll now put it in a form suitable for use as a PYNQ overlay, synthesize and deploy it." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Inserting the IP into a PYNQ Overlay Shell <a id='pynq_shell'></a>\n", + "\n", + "We are almost done preparing our hardware design. To deploy our accelerator on a PYNQ platform, it needs to be put inside an appropriate *shell* that bridges it with the interfaces that the underlying system exposes. FINN makes it easy to create a PYNQ-compatible overlay by inserting the stitched IP into an appropriate PYNQ shell with the `MakePYNQProject` transformation, and view the created PYNQ shell project directory using the `metadata_props`. 
**This invokes Vivado and may take a few minutes to run.**" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[key: \"vivado_stitch_proj\"\n", + "value: \"/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo\"\n", + ", key: \"vivado_stitch_vlnv\"\n", + "value: \"xilinx_finn:finn:finn_design:1.0\"\n", + ", key: \"wrapper_filename\"\n", + "value: \"/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo/finn_vivado_stitch_proj.srcs/sources_1/bd/finn_design/hdl/finn_design_wrapper.v\"\n", + ", key: \"vivado_pynq_proj\"\n", + "value: \"/tmp/finn_maltanar/vivado_pynq_proj_hq9mfroo\"\n", + "]" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject\n", + "model = ModelWrapper(build_dir+\"/tfc_w1_a1_ipstitch.onnx\")\n", + "model = model.transform(MakePYNQProject(pynq_board))\n", + "model.model.metadata_props" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ip_config.tcl\t resizer.cache\tresizer.ip_user_files resizer.xpr\r\n", + "make_project.sh resizer.hw\tresizer.srcs\t synth_project.sh\r\n" + ] + } + ], + "source": [ + "! ls {model.get_metadata_prop(\"vivado_pynq_proj\")}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If we open the created Vivado project (.xpr) under the `vivado_pynq_proj` directory above, we can see the system-level block design as below, with the FINN-generated part of the design highlighted. 
Various other components, such as the DMA engine and data width converters, have also been instantiated.\n", + "" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [], + "source": [ + "model.save(build_dir + \"/tfc_w1_a1_pynq_project.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Synthesis, place and route <a id='synth_pl_ro'></a>" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We are now ready for the final hardware generation step, which is synthesis, place and route to generate an FPGA bitfile. This can be done by either running the `synth_project.sh` script in the generated Vivado PYNQ project directory inside Docker, or by executing the `SynthPYNQProject` transformation. **This step involves launching Vivado for synthesis and may take a few hours.**" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[key: \"vivado_stitch_proj\"\n", + "value: \"/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo\"\n", + ", key: \"vivado_stitch_vlnv\"\n", + "value: \"xilinx_finn:finn:finn_design:1.0\"\n", + ", key: \"wrapper_filename\"\n", + "value: \"/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo/finn_vivado_stitch_proj.srcs/sources_1/bd/finn_design/hdl/finn_design_wrapper.v\"\n", + ", key: \"vivado_pynq_proj\"\n", + "value: \"/tmp/finn_maltanar/vivado_pynq_proj_hq9mfroo\"\n", + ", key: \"vivado_pynq_bitfile\"\n", + "value: \"/tmp/finn_maltanar/vivado_pynq_proj_hq9mfroo/resizer.bit\"\n", + "]" + ] + }, + "execution_count": 37, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject\n", + "model = ModelWrapper(build_dir + \"/tfc_w1_a1_pynq_project.onnx\")\n", + "model = model.transform(SynthPYNQProject())\n", + "model.model.metadata_props" + ] + }, + { + "cell_type": "code", + "execution_count": 
38, + "metadata": {}, + "outputs": [], + "source": [ + "model.save(build_dir + \"/tfc_w1_a1_post_synthesis.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Driver generation <a id='driver_gen'></a>\n", + "\n", + "Now that we have synthesized a bitfile for our network, we will generate some Python code for PYNQ that will act as the driver for this bitfile, package everything into a deployment folder and copy that to our PYNQ board." + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver\n", + "model = ModelWrapper(build_dir + \"/tfc_w1_a1_post_synthesis.onnx\")\n", + "model = model.transform(MakePYNQDriver())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The generated driver is placed in a folder that is indicated by the `pynq_driver_dir` top-level metadata. We can examine the generated PYNQ Python driver code as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\r\n", + "from pynq import Overlay\r\n", + "import numpy as np\r\n", + "from pynq import allocate\r\n", + "from finn.util.data_packing import (\r\n", + " finnpy_to_packed_bytearray,\r\n", + " packed_bytearray_to_finnpy\r\n", + ")\r\n", + "from finn.core.datatype import DataType\r\n", + "\r\n", + "bitfile_path = \"resizer.bit\"\r\n", + "ol = Overlay(bitfile_path)\r\n", + "dma=ol.axi_dma_0\r\n", + "\r\n", + "# declare input/output types and shapes for the accelerator\r\n", + "# input FINN DataType\r\n", + "idt = DataType.BINARY\r\n", + "# normal, folded and packed input shapes\r\n", + "ishape_normal = (1, 784)\r\n", + "ishape_folded = (1, 49, 16)\r\n", + "ishape_packed = (1, 49, 2)\r\n", + "# output FINN DataType\r\n", + "odt = DataType.UINT32\r\n", + "# normal, folded and packed output 
shapes\r\n", + "oshape_normal = (1, 10)\r\n", + "oshape_folded = (1, 1, 10)\r\n", + "oshape_packed = (1, 1, 40)\r\n", + "\r\n", + "# load desired input .npy file\r\n", + "ibuf_normal = np.load(\"input.npy\")\r\n", + "# ensure that shape is as expected\r\n", + "assert ibuf_normal.shape == ishape_normal\r\n", + "# convert to folded form\r\n", + "ibuf_folded = ibuf_normal.reshape(ishape_folded)\r\n", + "\r\n", + "# pack the input buffer, reversing both SIMD dim and endianness\r\n", + "ibuf_packed = finnpy_to_packed_bytearray(\r\n", + " ibuf_folded, idt, reverse_endian=True, reverse_inner=True\r\n", + ")\r\n", + "# allocate a PYNQ buffer for the packed input buffer\r\n", + "ibuf_packed_device = allocate(shape=ishape_packed, dtype=np.uint8)\r\n", + "# copy the packed data into the PYNQ buffer\r\n", + "# TODO optimization: pack directly into the PYNQ buffer?\r\n", + "np.copyto(ibuf_packed_device, ibuf_packed)\r\n", + "\r\n", + "# allocate a PYNQ buffer for the returned packed output buffer\r\n", + "obuf_packed = allocate(shape=oshape_packed, dtype=np.uint8)\r\n", + "\r\n", + "# set up the DMA and wait until all transfers complete\r\n", + "dma.sendchannel.transfer(ibuf_packed_device)\r\n", + "dma.recvchannel.transfer(obuf_packed)\r\n", + "dma.sendchannel.wait()\r\n", + "dma.recvchannel.wait()\r\n", + "\r\n", + "# unpack the packed output buffer from accelerator\r\n", + "obuf_folded = packed_bytearray_to_finnpy(\r\n", + " obuf_packed, odt, oshape_folded, reverse_endian=True, reverse_inner=True\r\n", + ")\r\n", + "# convert to normal reshape and save\r\n", + "obuf_normal = obuf_folded.reshape(oshape_normal)\r\n", + "np.save(\"output.npy\", obuf_normal)\r\n" + ] + } + ], + "source": [ + "driver_dir = model.get_metadata_prop(\"pynq_driver_dir\")\n", + "! 
cat {driver_dir}/driver.py" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can see that the generated driver contains the expected input/output shapes, expecting a file called `input.npy` to be provided prior to execution, which will be read in, packed into the format that the accelerator expects, running it and generating an `output.npy` file with the results. You can build your own applications around the accelerator by modifying the driver, or use the remote execution capabilities that FINN provides just to check if it is working, which will be our next step." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Deployment and Remote Execution <a id='deploy'></a>\n", + "\n", + "We'll now use the `DeployToPYNQ` transformation to create a deployment folder with the bitfile and driver file(s), and copy that to the PYNQ board. You can change the default IP address, username, password and target folder for the PYNQ below." + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ\n", + "ip = \"192.168.3.1\"\n", + "username = \"xilinx\"\n", + "password = \"xilinx\"\n", + "target_dir = \"/home/xilinx/finn_tfc_end2end_example\"\n", + "model = model.transform(DeployToPYNQ(ip, username, password, target_dir))\n", + "model.save(build_dir + \"/tfc_w1_a1_pynq_deploy.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's verify that the remote access credentials are saved in the model metadata, and that the deployment folder has been successfully copied to the board:" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[key: \"vivado_stitch_proj\"\n", + "value: \"/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo\"\n", + ", key: \"vivado_stitch_vlnv\"\n", + "value: 
\"xilinx_finn:finn:finn_design:1.0\"\n", + ", key: \"wrapper_filename\"\n", + "value: \"/tmp/finn_maltanar/vivado_stitch_proj_oo2lpoeo/finn_vivado_stitch_proj.srcs/sources_1/bd/finn_design/hdl/finn_design_wrapper.v\"\n", + ", key: \"vivado_pynq_proj\"\n", + "value: \"/tmp/finn_maltanar/vivado_pynq_proj_hq9mfroo\"\n", + ", key: \"vivado_pynq_bitfile\"\n", + "value: \"/tmp/finn_maltanar/vivado_pynq_proj_hq9mfroo/resizer.bit\"\n", + ", key: \"pynq_driver_dir\"\n", + "value: \"/tmp/finn_maltanar/pynq_driver_25t8u9sd\"\n", + ", key: \"pynq_ip\"\n", + "value: \"192.168.3.1\"\n", + ", key: \"pynq_username\"\n", + "value: \"xilinx\"\n", + ", key: \"pynq_password\"\n", + "value: \"xilinx\"\n", + ", key: \"pynq_target_dir\"\n", + "value: \"/home/xilinx/finn_tfc_end2end_example\"\n", + ", key: \"pynq_deployment_dir\"\n", + "value: \"/tmp/finn_maltanar/pynq_deployment_mpyziv7h\"\n", + ", key: \"pynq_deploy_dir\"\n", + "value: \"/tmp/finn_maltanar/pynq_deployment_mpyziv7h\"\n", + ", key: \"exec_mode\"\n", + "value: \"remote_pynq\"\n", + "]" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.model.metadata_props" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/home/xilinx/finn_tfc_end2end_example/pynq_deployment_1oyo7x66:\r\n", + "total 5820\r\n", + "-rw-r--r-- 1 xilinx xilinx 1934 Feb 13 13:36 driver.py\r\n", + "drwxr-xr-x 4 xilinx xilinx 4096 Feb 13 13:36 finn\r\n", + "-rw-r--r-- 1 xilinx xilinx 3264 Feb 13 14:24 input.npy\r\n", + "-rw-r--r-- 1 root root 120 Feb 13 14:24 output.npy\r\n", + "-rw-r--r-- 1 xilinx xilinx 5568787 Feb 13 13:36 resizer.bit\r\n", + "-rw-r--r-- 1 xilinx xilinx 368173 Feb 13 13:36 resizer.hwh\r\n", + "-rw-r--r-- 1 root root 32 Feb 13 14:24 sds_trace_data.dat\r\n", + "\r\n", + "/home/xilinx/finn_tfc_end2end_example/pynq_deployment_mpyziv7h:\r\n", + "total 5808\r\n", + 
"-rw-r--r-- 1 xilinx xilinx 1934 Feb 28 16:09 driver.py\r\n", + "drwxr-xr-x 4 xilinx xilinx 4096 Feb 28 16:09 finn\r\n", + "-rw-r--r-- 1 xilinx xilinx 5568787 Feb 28 16:09 resizer.bit\r\n", + "-rw-r--r-- 1 xilinx xilinx 368173 Feb 28 16:09 resizer.hwh\r\n" + ] + } + ], + "source": [ + "! sshpass -p {password} ssh {username}@{ip} 'ls -l {target_dir}/*'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We only have two more steps to be able to remotely execute the deployed bitfile with some test data from the MNIST dataset. Let's load up some test data that comes bundled with FINN." + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "<matplotlib.image.AxesImage at 0x7f17e0a82e10>" + ] + }, + "execution_count": 44, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from pkgutil import get_data\n", + "import onnx.numpy_helper as nph\n", + "import matplotlib.pyplot as plt\n", + "\n", + "raw_i = get_data(\"finn\", \"data/onnx/mnist-conv/test_data_set_0/input_0.pb\")\n", + "x = nph.to_array(onnx.load_tensor_from_string(raw_i))\n", + "plt.imshow(x.reshape(28,28), cmap='gray')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Recall that we partitioned our original network into a parent graph that contained the non-synthesizable nodes and a child graph that contained the bulk of the network, which we turned into a bitfile. We'll load up the parent graph, modify the `StreamingDataflowPartition` node so that it points to the deployed ONNX graph." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": {}, + "outputs": [], + "source": [ + "parent_model = ModelWrapper(build_dir+\"/tfc_w1_a1_dataflow_parent.onnx\")\n", + "sdp_node = parent_model.graph.node[2]\n", + "remote_exec_model = build_dir + \"/tfc_w1_a1_pynq_deploy.onnx\"\n", + "getCustomOp(sdp_node).set_nodeattr(\"model\", remote_exec_model)\n", + "parent_model.save(build_dir+\"/tfc_w1_a1_dataflow_parent_with_remote_bitfile_exec.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we can call `execute_onnx` on the parent graph, which will internally call remote execution with the bitfile once the `StreamingDataflowPartition` node is reached, grab the results, then continue executing the last portion of the network. " + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "from finn.core.onnx_exec import execute_onnx\n", + "iname = parent_model.graph.input[0].name\n", + "oname = parent_model.graph.output[0].name\n", + "ishape = parent_model.get_tensor_shape(iname)\n", + "input_dict = {iname: x.reshape(ishape)}\n", + "ret = execute_onnx(parent_model, input_dict, True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We'll pass the output of the network through a softmax function to interpret it as probabilities, and plot the per-class probabilities as a bar chart." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "<BarContainer object of 10 artists>" + ] + }, + "execution_count": 49, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAMoUlEQVR4nO3cf6jd913H8edryercD1sxV9AkLgEzNQyl5dJVC1pshbSV5A9FGqjoKMs/y6yuKJlKHfWfzcn8gXUa5xzO2azWIcFGI7iKILbkdp11SYxcstrcrNK7rtYfQ7Pg2z/uiZzd3ptzkp57T/u+zwcEzvf7/XC+75ObPDn3e36kqpAkvfa9btoDSJImw6BLUhMGXZKaMOiS1IRBl6QmNk/rxFu2bKkdO3ZM6/SS9Jr05JNPfqmqZlY6NrWg79ixg7m5uWmdXpJek5L8y2rHvOQiSU0YdElqwqBLUhMjg57kY0meT/L5VY4nyW8mmU/ydJIbJj+mJGmUcZ6hfxzYc5njtwO7Bn8OAB955WNJkq7UyKBX1d8CX77Mkn3AH9aSx4HrknzLpAaUJI1nEtfQtwLnhrYXBvteJsmBJHNJ5hYXFydwaknSJev6omhVHa6q2aqanZlZ8X3xkqSrNImgnwe2D21vG+yTJK2jSXxS9ChwMMkR4B3AS1X13ATuV8vsOPTomp/jmQ/cuebnkLQ2RgY9yUPALcCWJAvALwGvB6iq3wGOAXcA88BXgHeu1bCSpNWNDHpV7R9xvIB3T2wiSdJV8ZOiktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1MRYQU+yJ8mZJPNJDq1w/NuSPJbkqSRPJ7lj8qNKki5nZNCTbAIeBG4HdgP7k+xetuwXgYer6nrgLuC3Jz2oJOnyxnmGfiMwX1Vnq+oCcATYt2xNAd8wuH0t8MXJjShJGsc4Qd8KnBvaXhjsG/Z+4O4kC8Ax4D0r3VGSA0nmkswtLi5exbiSpNVM6kXR/cDHq2obcAfwiSQvu++qOlxVs1U1OzMzM6FTS5JgvKCfB7YPbW8b7Bt2D/AwQFX9PfAGYMskBpQkjWecoJ8AdiXZmeQall70PLpszbPArQBJvouloHtNRZLW0cigV9VF4CBwHDjN0rtZTiZ5IMnewbL7gHcl+QfgIeAnq6rWamhJ0sttHmdRVR1j6cXO4X33D90+Bdw82dEkSVfCT4pKUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSE2MFPcmeJGeSzCc5tMqaH0tyKsnJJH882TElSaNsHrUgySbgQeCHgAXgRJKjVXVqaM0u4H3AzVX1YpJvXquBJUkrG+cZ+o3AfFWdraoLwBFg37I17wIerKoXAarq+cmOKUkaZZygb
wXODW0vDPYNexvwtiR/l+TxJHtWuqMkB5LMJZlbXFy8uoklSSua1Iuim4FdwC3AfuD3kly3fFFVHa6q2aqanZmZmdCpJUkwXtDPA9uHtrcN9g1bAI5W1Ver6gvAP7MUeEnSOhkn6CeAXUl2JrkGuAs4umzNn7H07JwkW1i6BHN2gnNKkkYYGfSquggcBI4Dp4GHq+pkkgeS7B0sOw68kOQU8Bjws1X1wloNLUl6uZFvWwSoqmPAsWX77h+6XcB7B38kSVPgJ0UlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpibGCnmRPkjNJ5pMcusy6H0lSSWYnN6IkaRwjg55kE/AgcDuwG9ifZPcK694C3As8MekhJUmjjfMM/UZgvqrOVtUF4Aiwb4V1vwx8EPjvCc4nSRrTOEHfCpwb2l4Y7Pt/SW4AtlfVo5e7oyQHkswlmVtcXLziYSVJq3vFL4omeR3wYeC+UWur6nBVzVbV7MzMzCs9tSRpyDhBPw9sH9reNth3yVuAtwN/k+QZ4CbgqC+MStL6GifoJ4BdSXYmuQa4Czh66WBVvVRVW6pqR1XtAB4H9lbV3JpMLEla0cigV9VF4CBwHDgNPFxVJ5M8kGTvWg8oSRrP5nEWVdUx4NiyffevsvaWVz6WJOlK+UlRSWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJamKsoCfZk+RMkvkkh1Y4/t4kp5I8neSvk7x18qNKki5nZNCTbAIeBG4HdgP7k+xetuwpYLaqvht4BPiVSQ8qSbq8cZ6h3wjMV9XZqroAHAH2DS+oqseq6iuDzceBbZMdU5I0yjhB3wqcG9peGOxbzT3AX6x0IMmBJHNJ5hYXF8efUpI00kRfFE1yNzALfGil41V1uKpmq2p2ZmZmkqeWpA1v8xhrzgPbh7a3DfZ9jSS3Ab8A/EBV/c9kxpMkjWucZ+gngF1Jdia5BrgLODq8IMn1wO8Ce6vq+cmPKUkaZWTQq+oicBA4DpwGHq6qk0keSLJ3sOxDwJuBP0nyuSRHV7k7SdIaGeeSC1V1DDi2bN/9Q7dvm/BckqQr5CdFJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqQmDLklNGHRJasKgS1ITBl2SmjDoktSEQZekJgy6JDVh0CWpCYMuSU0YdElqwqBLUhMGXZKaMOiS1IRBl6QmDLokNWHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUhEGXpCYMuiQ1YdAlqYmxgp5kT5IzSeaTHFrh+Ncl+dTg+BNJdkx6UEnS5Y0MepJNwIPA7cBuYH+S3cuW3QO8WFXfDvwa8MFJDypJurzNY6y5EZivqrMASY4A+4BTQ2v2Ae8f3H4E+K0kqaqa4Kyaoh2HHl3zczzzgTvX/ByvNWv99+7feS/jBH0rcG5oewF4x2prqupikpeAbwK+NLwoyQHgwGDzP5OcuZqhr9KW5fNsEFf0uDPF360mfG5/3mOY5s97wjbSz/utqx0YJ+gTU1WHgcPrec5LksxV1ew0zj1NPu6Nxce9sY3zouh5YPvQ9rbBvhXXJNkMXAu8MIkBJUnjGSfoJ4BdSXYmuQa4Czi6bM1R4CcGt38U+IzXzyVpfY285DK4Jn4QOA5sAj5WVSeTPADMVdVR4PeBTySZB77MUvRfbaZyqedVwMe9s
fi4N7D4RFqSevCTopLUhEGXpCbaB33U1xZ0lGR7kseSnEpyMsm9055pPSXZlOSpJH8+7VnWU5LrkjyS5J+SnE7yvdOeaT0k+ZnBv/PPJ3koyRumPdO0tA76mF9b0NFF4L6q2g3cBLx7gzzuS+4FTk97iCn4DeAvq+o7ge9hA/wdJNkK/BQwW1VvZ+mNG6/GN2Wsi9ZBZ+hrC6rqAnDpawtaq6rnquqzg9v/wdJ/7K3TnWp9JNkG3Al8dNqzrKck1wLfz9I7zqiqC1X1b9Odat1sBr5+8BmYNwJfnPI8U9M96Ct9bcGGCNslg2++vB54YrqTrJtfB34O+N9pD7LOdgKLwB8MLjd9NMmbpj3UWquq88CvAs8CzwEvVdVfTXeq6eke9A0tyZuBPwV+uqr+fdrzrLUkPww8X1VPTnuWKdgM3AB8pKquB/4LaP+aUZJvZOm37p3AtwJvSnL3dKeanu5BH+drC1pK8nqWYv7Jqvr0tOdZJzcDe5M8w9LltR9M8kfTHWndLAALVXXpN7FHWAp8d7cBX6iqxar6KvBp4PumPNPUdA/6OF9b0E6SsHQt9XRVfXja86yXqnpfVW2rqh0s/aw/U1Ub4tlaVf0rcC7Jdwx23crXfsV1V88CNyV54+Df/a1sgBeDV7Ou37a43lb72oIpj7UebgZ+HPjHJJ8b7Pv5qjo2xZm09t4DfHLw5OUs8M4pz7PmquqJJI8An2Xp3V1PsYG/BsCP/ktSE90vuUjShmHQJakJgy5JTRh0SWrCoEtSEwZdkpow6JLUxP8BwjHuoBhu1y0AAAAASUVORK5CYII=\n", + "text/plain": [ + "<Figure size 432x288 with 1 Axes>" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "def softmax(x):\n", + " \"\"\"Compute softmax values for each sets of scores in x.\"\"\"\n", + " e_x = np.exp(x - np.max(x))\n", + " return e_x / e_x.sum()\n", + "\n", + "logits = ret[oname].flatten()\n", + "prob = softmax(logits)\n", + "\n", + "plt.bar(np.arange(10), prob)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We see that the network correctly predicts this as a digit 2 with high probability. This concludes our tutorial on how to take a simple fully-connected BNN all the way down to hardware with FINN, and execute it remotely on a PYNQ board." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/end2end_example/tfc_end2end_verification.ipynb b/notebooks/end2end_example/tfc_end2end_verification.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..6eab7220f20fc78b17cfb0745e69e60ead16b3a0 --- /dev/null +++ b/notebooks/end2end_example/tfc_end2end_verification.ipynb @@ -0,0 +1,552 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# FINN - Functional Verification of End-to-End Flow\n", + "-----------------------------------------------------------------\n", + "\n", + "**Important: This notebook depends on the tfc_end2end_example notebook, because we are using models that are available at intermediate steps in the end-to-end flow. So please make sure the needed .onnx files are generated to run this notebook.**\n", + "\n", + "In this notebook, we will show how to take the intermediate results of the end-to-end tfc example and verify their functionality with different methods. In the following picture you can see the block in the end-to-end flow about the *Simulation & Emulation flows for functional verification*. Besides the methods in this notebook, there is another one that is covered in the Jupyter notebook [tfc_end2end_example](tfc_end2end_example.ipynb): remote execution. The remote execution allows functional verification directly on the PYNQ board, for details please have a look at the mentioned Jupyter notebook." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We will use the following helper functions, `showSrc` to show source code of FINN library calls and `showInNetron` to show the ONNX model at the current transformation step. The Netron displays are interactive, but they only work when running the notebook actively and not on GitHub (i.e. if you are viewing this on GitHub you'll only see blank squares)." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import inspect\n", + "import netron\n", + "from finn.util.basic import make_build_dir\n", + "from IPython.display import IFrame\n", + "\n", + "def showSrc(what):\n", + " print(\"\".join(inspect.getsourcelines(what)[0]))\n", + " \n", + "def showInNetron(model_filename):\n", + " netron.start(model_filename, port=8081, host=\"0.0.0.0\")\n", + " return IFrame(src=\"http://0.0.0.0:8081/\", width=\"100%\", height=400)\n", + " \n", + "build_dir = \"/workspace/finn\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To verify the simulations a \"golden\" output is calculated as a reference. This is calculated directly from the Brevitas model using PyTorch, by running some example data from the MNIST dataset through the trained model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-0.4992097 , -0.24960485, 6.489726 , 0.99841946, -0.24960482,\n", + " -2.2464437 , 0.7488146 , -1.4976292 , -0.49920973, -2.7456534 ]],\n", + " dtype=float32)" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from pkgutil import get_data\n", + "import onnx\n", + "import onnx.numpy_helper as nph\n", + "import torch\n", + "from finn.util.test import get_test_model_trained\n", + "\n", + "fc = get_test_model_trained(\"TFC\", 1, 1)\n", + "raw_i = get_data(\"finn\", \"data/onnx/mnist-conv/test_data_set_0/input_0.pb\")\n", + "input_tensor = onnx.load_tensor_from_string(raw_i)\n", + "input_brevitas = torch.from_numpy(nph.to_array(input_tensor)).float()\n", + "output_golden = fc.forward(input_brevitas).detach().numpy()\n", + "output_golden" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Simulation using Python \n", + "\n", + "If an ONNX model consists of [standard ONNX](https://github.com/onnx/onnx/blob/master/docs/Operators.md) nodes and/or FINN custom operations that do not belong to the fpgadataflow (backend $\\neq$ \"fpgadataflow\") this model can be checked for functionality using Python. General information about FINN custom op nodes can be found in Jupyter notebook [2_custom_op.ipynb](../internals/2_custom_op.ipynb).\n", + "\n", + "To simulate a standard ONNX node [onnxruntime](https://github.com/microsoft/onnxruntime) is used. onnxruntime is an open source tool developed by Microsoft to run standard ONNX nodes. For the FINN custom op nodes execution functions are defined. 
The following is an example of the execution function of a XNOR popcount node.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "def xnorpopcountmatmul(inp0, inp1):\n", + " \"\"\"Simulates XNOR-popcount matrix multiplication as a regular bipolar\n", + " matrix multiplication followed by some post processing.\"\"\"\n", + " # extract the operand shapes\n", + " (M, K0) = inp0.shape\n", + " (K1, N) = inp1.shape\n", + " # make sure shapes are compatible with matmul\n", + " assert K0 == K1, \"Matrix shapes are not compatible with matmul.\"\n", + " K = K0\n", + " # convert binary inputs to bipolar\n", + " inp0_bipolar = 2.0 * inp0 - 1.0\n", + " inp1_bipolar = 2.0 * inp1 - 1.0\n", + " # call regular numpy matrix multiplication\n", + " out = np.matmul(inp0_bipolar, inp1_bipolar)\n", + " # XNOR-popcount does not produce the regular dot product result --\n", + " # it returns the number of +1s after XNOR. let P be the number of +1s\n", + " # and N be the number of -1s. XNOR-popcount returns P, whereas the\n", + " # regular dot product result from numpy is P-N, so we need to apply\n", + " # some correction.\n", + " # out = P-N\n", + " # K = P+N\n", + " # out + K = 2P, so P = (out + K)/2\n", + " return (out + K) * 0.5\n", + "\n" + ] + } + ], + "source": [ + "from finn.custom_op.xnorpopcount import xnorpopcountmatmul\n", + "showSrc(xnorpopcountmatmul)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The function contains a description of the behaviour in Python and can thus calculate the result of the node.\n", + "\n", + "This execution function and onnxruntime is used when `execute_onnx` from `onnx_exec` is applied to the model. The model is then simulated node by node and the result is stored in a context dictionary, which contains the values of each tensor at the end of the execution. 
To get the result, only the output tensor has to be extracted.\n", + "\n", + "The procedure is shown below. We take the model right before the nodes should be converted into HLS layers and generate an input tensor to pass to the execution function. The input tensor is generated from the Brevitas example inputs." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "from finn.core.modelwrapper import ModelWrapper\n", + "input_dict = {\"global_in\": nph.to_array(input_tensor)}\n", + "\n", + "model_for_sim = ModelWrapper(build_dir+\"/tfc_w1a1_ready_for_hls_conversion.onnx\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Results are the same!\n" + ] + } + ], + "source": [ + "import finn.core.onnx_exec as oxe\n", + "output_dict = oxe.execute_onnx(model_for_sim, input_dict)\n", + "output_pysim = output_dict[list(output_dict.keys())[0]]\n", + "\n", + "\n", + "\n", + "if np.isclose(output_pysim, output_golden, atol=1e-3).all():\n", + " print(\"Results are the same!\")\n", + "else:\n", + " print(\"The results are not the same!\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The result is compared with the theoretical \"golden\" value for verification." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Simulation (npysim) using C++\n", + "\n", + "When dealing with HLS custom op nodes in FINN the simulation using Python is no longer sufficient. After the nodes have been converted to HLS layers, the simulation using C++ can be used. To do this, the input tensor is stored in an .npy file and C++ code is generated that reads the values from the .npy array, streams them to the corresponding finn-hlslib function and writes the result to a new .npy file. This in turn can be read in Python and processed in the FINN flow. 
For this example the model after the conversion to HLS layers is used." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "model_for_npysim = ModelWrapper(build_dir+\"/tfc_w1_a1_hls_layers.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To generate the code for this simulation and to generate the executable two transformations are used:\n", + "* `CodeGen_npysim` which generates the C++ code for the corresponding hls layer\n", + "* `Compile` which compiles the C++ code and stores the path to the executable" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim\n", + "from finn.transformation.fpgadataflow.compile import Compile\n", + "\n", + "model_for_npysim = model_for_npysim.transform(CodeGen_npysim())\n", + "model_for_npysim = model_for_npysim.transform(Compile())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "When we take a look at the model using netron, we can see that the transformations introduced new attributes." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Serving '/workspace/finn/tfc_w1_a1_for_npysim.onnx' at http://0.0.0.0:8081\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " <iframe\n", + " width=\"100%\"\n", + " height=\"400\"\n", + " src=\"http://0.0.0.0:8081/\"\n", + " frameborder=\"0\"\n", + " allowfullscreen\n", + " ></iframe>\n", + " " + ], + "text/plain": [ + "<IPython.lib.display.IFrame at 0x7fb461dd6710>" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_for_npysim.save(build_dir+\"/tfc_w1_a1_for_npysim.onnx\")\n", + "showInNetron(build_dir+\"/tfc_w1_a1_for_npysim.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The following node attributes have been added:\n", + "* `code_gen_dir_npysim` indicates the directory where the files for the simulation using C++ are stored\n", + "* `executable_path` specifies the path to the executable\n", + "\n", + "We take now a closer look into the files that were generated:" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "compile.sh execute_StreamingFCLayer_Batch.cpp\tnode_model params.h thresh.h\r\n" + ] + } + ], + "source": [ + "from finn.custom_op.registry import getCustomOp\n", + "\n", + "fc0 = model_for_npysim.graph.node[2]\n", + "fc0w = getCustomOp(fc0)\n", + "code_gen_dir = fc0w.get_nodeattr(\"code_gen_dir_npysim\")\n", + "!ls {code_gen_dir}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Besides the .cpp file, the folder contains .h files with the weights and thresholds. The shell script contains the compile command and *node_model* is the executable generated by compilation. 
Comparing this with the `executable_path` node attribute, it can be seen that it specifies exactly the path to *node_model*." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To simulate the model the execution mode (exec_mode) must be set to \"npysim\". This is done using the transformation SetExecMode." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode\n", + "\n", + "model_for_npysim = model_for_npysim.transform(SetExecMode(\"npysim\"))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now the model can be executed using `execute_onnx`. The function reads the `exec_mode` and writes the input into the correct directory in a .npy file. To be able to read this in C++, there is an additional .hpp file ([npy2apintstream.hpp](https://github.com/Xilinx/finn/blob/master/src/finn/data/cpp/npy2apintstream.hpp)) in FINN, which uses cnpy to read .npy files and convert them into streams, or to read a stream and write it into an .npy. [cnpy](https://github.com/rogersce/cnpy) is a helper to read and write .npy and .npz formats in C++.\n", + "\n", + "The result is again compared to the \"golden\" output." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Results are the same!\n" + ] + } + ], + "source": [ + "output_dict = oxe.execute_onnx(model_for_npysim, input_dict)\n", + "output_npysim = output_dict[list(output_dict.keys())[0]]\n", + "\n", + "if np.isclose(output_npysim, output_golden, atol=1e-3).all():\n", + " print(\"Results are the same!\")\n", + "else:\n", + " print(\"The results are not the same!\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Emulation (rtlsim) using PyVerilator\n", + "\n", + "The emulation using [PyVerilator](https://github.com/maltanar/pyverilator) can be done after IP blocks are generated from the corresponding HLS layers. Pyverilator is a tool which makes it possible to simulate verilog files using verilator via a python interface.\n", + "\n", + "We have two ways to use rtlsim, one is to run the model node-by-node as with the simulation methods, but if the model is in the form of the dataflow partition, the part of the graph that consists of only HLS nodes could also be executed as a whole." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Because at the point where we want to grab and verify the model, the model is already in split form (parent graph consisting of non-hls layers and child graph consisting only of hls layers) we first have to reference the child graph within the parent graph. This is done using the node attribute `model` for the `StreamingDataflowPartition` node.\n", + "\n", + "First the procedure is shown, if the child graph has ip blocks corresponding to the individual layers, then the procedure is shown, if the child graph already has a stitched IP." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Emulation of model layer-by-layer\n", + "\n", + "The child model is loaded and the `exec_mode` for each node is set. 
Then it is saved in a new .onnx file so that the changed model can be referenced in the parent model." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "child_model = ModelWrapper(build_dir + \"/tfc_w1_a1_ipgen.onnx\")\n", + "child_model = child_model.transform(SetExecMode(\"rtlsim\"))\n", + "child_model.save(build_dir + \"/tfc_w1_a1_dataflow_child.onnx\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The next step is to load the parent model and set the node attribute `model` in the StreamingDataflowPartition node (`sdp_node`). Afterwards the `exec_mode` is set in the parent model in each node." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "# parent model\n", + "model_for_rtlsim = ModelWrapper(build_dir + \"/tfc_w1_a1_dataflow_parent.onnx\")\n", + "# reference child model\n", + "sdp_node = getCustomOp(model_for_rtlsim.graph.node[2])\n", + "sdp_node.set_nodeattr(\"model\", build_dir + \"/tfc_w1_a1_dataflow_child.onnx\")\n", + "\n", + "model_for_rtlsim = model_for_rtlsim.transform(SetExecMode(\"rtlsim\"))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Because the necessary files for the emulation are already generated in Jupyter notebook [tfc_end2end_example](tfc_end2end_example.ipynb), in the next step the execution of the model can be done directly." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Results are the same!\n" + ] + } + ], + "source": [ + "output_dict = oxe.execute_onnx(model_for_rtlsim, input_dict)\n", + "output_rtlsim = output_dict[list(output_dict.keys())[0]]\n", + "\n", + "if np.isclose(output_rtlsim, output_golden, atol=1e-3).all():\n", + " print(\"Results are the same!\")\n", + "else:\n", + " print(\"The results are not the same!\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Emulation of stitched IP\n", + "\n", + "Here we use the same procedure. First the child model is loaded, but in contrast to the layer-by-layer emulation, the metadata property `exec_mode` is set to \"rtlsim\" for the whole child model. When the model is integrated and executed in the last step, the verilog files of the stitched IP of the child model are used." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "child_model = ModelWrapper(build_dir + \"/tfc_w1_a1_ipstitch.onnx\")\n", + "child_model.set_metadata_prop(\"exec_mode\",\"rtlsim\")\n", + "child_model.save(build_dir + \"/tfc_w1_a1_dataflow_child.onnx\")" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "# parent model\n", + "model_for_rtlsim = ModelWrapper(build_dir + \"/tfc_w1_a1_dataflow_parent.onnx\")\n", + "# reference child model\n", + "sdp_node = getCustomOp(model_for_rtlsim.graph.node[2])\n", + "sdp_node.set_nodeattr(\"model\", build_dir + \"/tfc_w1_a1_dataflow_child.onnx\")" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Results are the same!\n" + ] + } + ], + "source": [ + "output_dict = oxe.execute_onnx(model_for_rtlsim, input_dict)\n", + "output_rtlsim = 
output_dict[list(output_dict.keys())[0]]\n", + "\n", + "if np.isclose(output_rtlsim, output_golden, atol=1e-3).all():\n", + " print(\"Results are the same!\")\n", + "else:\n", + " print(\"The results are not the same!\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/notebooks/end2end_example/verification.png b/notebooks/end2end_example/verification.png new file mode 100755 index 0000000000000000000000000000000000000000..71645fef72496258544641bb09e7b529656b8812 Binary files /dev/null and b/notebooks/end2end_example/verification.png differ diff --git a/notebooks/FINN-HowToAnalysisPass.ipynb b/notebooks/internals/0_custom_analysis_pass.ipynb similarity index 94% rename from notebooks/FINN-HowToAnalysisPass.ipynb rename to notebooks/internals/0_custom_analysis_pass.ipynb index 58a89356b05dff89a093ec6bafc7c05a5826d97b..3db1d1c47acef301f7b89a05980aa68477ba567f 100644 --- a/notebooks/FINN-HowToAnalysisPass.ipynb +++ b/notebooks/internals/0_custom_analysis_pass.ipynb @@ -13,7 +13,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -58,13 +58,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "Serving 'LFCW1A1.onnx' at http://0.0.0.0:8081\n" + "Serving '../LFCW1A1.onnx' at http://0.0.0.0:8081\n" ] } ], "source": [ "import netron\n", - "netron.start('LFCW1A1.onnx', port=8081, host=\"0.0.0.0\")" + "netron.start('../LFCW1A1.onnx', port=8081, host=\"0.0.0.0\")" ] }, { @@ -99,12 +99,12 @@ }, { 
"cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "from finn.core.modelwrapper import ModelWrapper\n", - "model = ModelWrapper('LFCW1A1.onnx')" + "model = ModelWrapper('../LFCW1A1.onnx')" ] }, { @@ -116,7 +116,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -146,7 +146,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -154,7 +154,7 @@ "output_type": "stream", "text": [ " def analysis(self, analysis_fxn):\n", - " \"\"\"Run given anaylsis_fxn on this model and return resulting dict.\"\"\"\n", + " \"\"\"Runs given anaylsis_fxn on this model and return resulting dict.\"\"\"\n", " return analysis_fxn(self)\n", "\n" ] @@ -173,7 +173,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "metadata": {}, "outputs": [ { diff --git a/notebooks/FINN-HowToTransformationPass.ipynb b/notebooks/internals/1_custom_transformation_pass.ipynb similarity index 95% rename from notebooks/FINN-HowToTransformationPass.ipynb rename to notebooks/internals/1_custom_transformation_pass.ipynb index 29e888deb19afaa427fa61819912bc9ba72b17a2..f0405c0db3b02cf19476ed6cc8d293a93df00b30 100644 --- a/notebooks/FINN-HowToTransformationPass.ipynb +++ b/notebooks/internals/1_custom_transformation_pass.ipynb @@ -55,6 +55,7 @@ " def transform(self, transformation, make_deepcopy=True):\n", " \"\"\"Applies given Transformation repeatedly until no more changes can be made\n", " and returns a transformed ModelWrapper instance.\n", + " \n", " If make_deepcopy is specified, operates on a new (deep)copy of model.\n", " \"\"\"\n", " transformed_model = self\n", @@ -109,6 +110,8 @@ "output_type": "stream", "text": [ "class Transformation(ABC):\n", + " \"\"\"Transformation class all transformations are based on. 
Contains only \n", + " abstract method apply() every transformation has to fill.\"\"\"\n", " def __init__(self):\n", " super().__init__()\n", "\n", @@ -145,39 +148,37 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "import onnx\n", - "onnx_model = onnx.load('LFCW1A1.onnx')\n", + "onnx_model = onnx.load('../LFCW1A1.onnx')\n", "from finn.core.modelwrapper import ModelWrapper\n", "onnx_model = ModelWrapper(onnx_model)" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "\n", - "Stopping http://0.0.0.0:8081\n", - "Serving 'LFCW1A1.onnx' at http://0.0.0.0:8081\n" + "Serving '../LFCW1A1.onnx' at http://0.0.0.0:8081\n" ] } ], "source": [ "import netron\n", - "netron.start('LFCW1A1.onnx', port=8081, host=\"0.0.0.0\")" + "netron.start('../LFCW1A1.onnx', port=8081, host=\"0.0.0.0\")" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -200,7 +201,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -237,7 +238,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 8, "metadata": {}, "outputs": [], "source": [ @@ -247,7 +248,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 9, "metadata": {}, "outputs": [ { @@ -266,7 +267,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 10, "metadata": {}, "outputs": [ { diff --git a/notebooks/FINN-CustomOps.ipynb b/notebooks/internals/2_custom_op.ipynb similarity index 60% rename from notebooks/FINN-CustomOps.ipynb rename to notebooks/internals/2_custom_op.ipynb index def670e46ff50a539df6a5e00788749c396bbd57..9aaef9d42ccde42a8f3a0213f1c287a8d72c164a 100644 --- a/notebooks/FINN-CustomOps.ipynb +++ b/notebooks/internals/2_custom_op.ipynb @@ -90,7 +90,7 @@ }, { 
"cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -98,6 +98,9 @@ "output_type": "stream", "text": [ "class CustomOp(ABC):\n", + " \"\"\"CustomOp class all custom op nodes are based on. Contains different functions \n", + " every custom node should have. Some as abstract methods, these have to be filled when\n", + " writing a new custom op node.\"\"\"\n", " def __init__(self, onnx_node):\n", " super().__init__()\n", " self.onnx_node = onnx_node\n", @@ -172,6 +175,13 @@ " \"\"\"Execute this CustomOp instance, given the execution context and\n", " ONNX graph.\"\"\"\n", " pass\n", + "\n", + " @abstractmethod\n", + " def verify_node(self):\n", + " \"\"\"Verifies that all attributes the node needs are there and\n", + " that particular attributes are set correctly. Also checks if\n", + " the number of inputs is equal to the expected number.\"\"\"\n", + " pass\n", "\n" ] } @@ -190,7 +200,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -198,6 +208,7 @@ "output_type": "stream", "text": [ "class MultiThreshold(CustomOp):\n", + " \"\"\"Class that corresponds to a multithresholding node.\"\"\"\n", " def get_nodeattr_types(self):\n", " return {\n", " \"out_dtype\": (\"s\", True, \"\"),\n", @@ -226,6 +237,52 @@ " output = multithreshold(v, thresholds, out_scale, out_bias)\n", " # setting context according to output\n", " context[node.output[0]] = output\n", + "\n", + " def verify_node(self):\n", + " info_messages = []\n", + "\n", + " # verify number of attributes\n", + " num_of_attr = 3\n", + " if len(self.onnx_node.attribute) == num_of_attr:\n", + " info_messages.append(\"The number of attributes is correct\")\n", + " else:\n", + " info_messages.append(\n", + " \"\"\"The number of attributes is incorrect,\n", + " {} should have {} attributes\"\"\".format(\n", + " self.onnx_node.op_type, num_of_attr\n", + " )\n", + " )\n", + "\n", + " # verify that \"domain\" is 
set to \"finn\"\n", + " domain_value = self.onnx_node.domain\n", + " if domain_value == \"finn\":\n", + " info_messages.append(\"Attribute domain is set correctly\")\n", + " else:\n", + " info_messages.append('Attribute domain should be set to \"finn\"')\n", + "\n", + " # verify that all necessary attributes exist\n", + " try:\n", + " self.get_nodeattr(\"out_scale\")\n", + " self.get_nodeattr(\"out_bias\")\n", + " self.get_nodeattr(\"out_dtype\")\n", + " info_messages.append(\"All necessary attributes exist\")\n", + " except Exception:\n", + " info_messages.append(\n", + " \"\"\"The necessary attributes do not exist.\n", + " MultiThreshold needs the following attributes:\n", + " out_scale, out_bias, out_dtype\"\"\"\n", + " )\n", + "\n", + " # verify the number of inputs\n", + " if len(self.onnx_node.input) == 2:\n", + " info_messages.append(\"The number of inputs is correct\")\n", + " else:\n", + " info_messages.append(\n", + " \"\"\"MultiThreshold needs 2 inputs\n", + " (data input and threshold values)\"\"\"\n", + " )\n", + "\n", + " return info_messages\n", "\n" ] } @@ -249,7 +306,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<font size=\"3\">`make_shape_compatible_op`: To use the flow of FINN, the transformation pass [infer_shapes](https://github.com/Xilinx/finn/blob/dev/src/finn/transformation/infer_shapes.py) is applied to the graphs in various places. In order for this transformation to be applied to CustomOps, they must first be converted to standard ONNX nodes with the same shape behavior. This means, nodes where the relationship between input and output shape is the same. \n", + "<font size=\"3\">`make_shape_compatible_op`: To use the flow of FINN, the transformation pass [infer_shapes](https://github.com/Xilinx/finn/blob/master/src/finn/transformation/infer_shapes.py) is applied to the graphs in various places. 
In order for this transformation to be applied to CustomOps, they must first be converted to standard ONNX nodes with the same shape behavior. This means, nodes where the relationship between input and output shape is the same. \n", "\n", "This is done at this point. Since the output shape of a multithreshold node is the same as the input shape, it can be replaced by a `\"Relu\"` node from the standard node library of onnx.</font>" ] @@ -265,7 +322,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<font size=\"3\">`execute_node`: This function allows the execution of the node, depending on the CustomOp a different functionality has to be implemented. In the case of the multithreshold node the input values and the thresholds are first extracted and after the attributes for the output scaling have been retrieved, the output is calculated with the help of a separate function. For more details regarding this function please take a look in the code [here](https://github.com/Xilinx/finn/blob/dev/src/finn/custom_op/multithreshold.py). </font>" + "<font size=\"3\">`execute_node`: This function allows the execution of the node, depending on the CustomOp a different functionality has to be implemented. In the case of the multithreshold node the input values and the thresholds are first extracted and after the attributes for the output scaling have been retrieved, the output is calculated with the help of a separate function. For more details regarding this function please take a look in the code [here](https://github.com/Xilinx/finn/blob/master/src/finn/custom_op/multithreshold.py). </font>" ] }, { @@ -323,7 +380,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -331,46 +388,119 @@ "output_type": "stream", "text": [ "class HLSCustomOp(CustomOp):\n", + " \"\"\"HLSCustomOp class all custom ops that correspond to a finn-hlslib \n", + " function are based on. 
Contains different functions every fpgadataflow \n", + " custom node should have. Some as abstract methods, these have to be filled\n", + " when writing a new fpgadataflow custom op node.\"\"\"\n", " def __init__(self, onnx_node):\n", " super().__init__(onnx_node)\n", - " # template for single node execution\n", - " self.docompute_template = \"\"\"\n", - " #include \"cnpy.h\"\n", - " #include \"npy2apintstream.hpp\"\n", - " #include <vector>\n", - " #include \"bnn-library.h\"\n", "\n", - " // includes for network parameters\n", - " $GLOBALS$\n", + " self.code_gen_dict = {}\n", "\n", - " // defines for network parameters\n", - " $DEFINES$\n", + " # getting templates from templates.py\n", "\n", - " int main(){\n", + " # template for single node execution\n", + " self.docompute_template = templates.docompute_template\n", "\n", - " $STREAMDECLARATIONS$\n", + " # templates for single node ip generation\n", + " # cpp file\n", + " self.ipgen_template = templates.ipgen_template\n", + " # tcl script\n", + " self.ipgentcl_template = templates.ipgentcl_template\n", "\n", - " $READNPYDATA$\n", + " def get_nodeattr_types(self):\n", + " return {\n", + " \"backend\": (\"s\", True, \"fpgadataflow\"),\n", + " \"code_gen_dir_npysim\": (\"s\", False, \"\"),\n", + " \"code_gen_dir_ipgen\": (\"s\", False, \"\"),\n", + " \"executable_path\": (\"s\", False, \"\"),\n", + " \"ipgen_path\": (\"s\", False, \"\"),\n", + " \"exec_mode\": (\"s\", False, \"\"),\n", + " \"sim_cycles\": (\"i\", False, 0),\n", + " \"rtlsim_trace\": (\"s\", False, \"\"),\n", + " }\n", "\n", - " $DOCOMPUTE$\n", + " def node_res_estimation(self):\n", + " \"\"\"Returns summarized resource estimation of BRAMs and LUTs \n", + " of the node.\"\"\"\n", + " resources = []\n", + " resources.append(\"BRAMs: \" + str(self.bram_estimation()))\n", + " resources.append(\"LUTs: \" + str(self.lut_estimation()))\n", + " return resources\n", + "\n", + " def bram_estimation(self):\n", + " \"\"\"Function for BRAM resource estimation, 
is member function of \n", + " HLSCustomOp class but has to be filled by every node\"\"\"\n", + " return 0\n", + "\n", + " def lut_estimation(self):\n", + " \"\"\"Function for LUT resource estimation, is member function of\n", + " HLSCustomOp class but has to be filled by every node\"\"\"\n", + " return 0\n", + "\n", + " def code_generation_ipgen(self, model, fpgapart, clk):\n", + " \"\"\"Generates c++ code and tcl script for ip generation.\"\"\"\n", + " node = self.onnx_node\n", "\n", - " $DATAOUTSTREAM$\n", + " # generate top cpp file for ip generation\n", + " path = self.get_nodeattr(\"code_gen_dir_ipgen\")\n", + " self.generate_params(model, path)\n", + " self.global_includes()\n", + " self.defines(\"ipgen\")\n", + " self.blackboxfunction()\n", + " self.pragmas()\n", + " self.docompute()\n", "\n", - " $SAVEASCNPY$\n", + " template = self.ipgen_template\n", "\n", - " }\n", + " for key in self.code_gen_dict:\n", + " # transform list into long string separated by '\\n'\n", + " code_gen_line = \"\\n\".join(self.code_gen_dict[key])\n", + " template = template.replace(key, code_gen_line)\n", + " code_gen_dir = self.get_nodeattr(\"code_gen_dir_ipgen\")\n", + " f = open(os.path.join(code_gen_dir, \"top_{}.cpp\".format(node.name)), \"w\")\n", + " f.write(template)\n", + " f.close()\n", + " self.code_gen_dict.clear()\n", "\n", - " \"\"\"\n", - " self.code_gen_dict = {}\n", + " # generate tcl script for ip generation\n", + " self.code_gen_dict[\"$PROJECTNAME$\"] = [\"project_{}\".format(node.name)]\n", + " self.code_gen_dict[\"$HWSRCDIR$\"] = [code_gen_dir]\n", + " self.code_gen_dict[\"$FPGAPART$\"] = [fpgapart]\n", + " self.code_gen_dict[\"$FINNHLSLIBDIR$\"] = [\"/workspace/finn-hlslib\"]\n", + " self.code_gen_dict[\"$TOPFXN$\"] = [node.name]\n", + " self.code_gen_dict[\"$CLKPERIOD$\"] = [str(clk)]\n", "\n", - " def get_nodeattr_types(self):\n", - " return {\"code_gen_dir\": (\"s\", False, \"\"), \"executable_path\": (\"s\", False, \"\")}\n", + " template = 
self.ipgentcl_template\n", + "\n", + " for key in self.code_gen_dict:\n", + " # transform list into long string separated by '\\n'\n", + " code_gen_line = \"\\n\".join(self.code_gen_dict[key])\n", + " template = template.replace(key, code_gen_line)\n", + " code_gen_dir = self.get_nodeattr(\"code_gen_dir_ipgen\")\n", + " f = open(os.path.join(code_gen_dir, \"hls_syn_{}.tcl\".format(node.name)), \"w\")\n", + " f.write(template)\n", + " f.close()\n", + " self.code_gen_dict.clear()\n", "\n", - " def code_generation(self, model):\n", + " def ipgen_singlenode_code(self):\n", + " \"\"\"Builds the bash script for ip generation using the IPGenBuilder from \n", + " finn.util.fpgadataflow.\"\"\"\n", " node = self.onnx_node\n", - " self.generate_params(model)\n", + " code_gen_dir = self.get_nodeattr(\"code_gen_dir_ipgen\")\n", + " builder = IPGenBuilder()\n", + " builder.append_tcl(code_gen_dir + \"/hls_syn_{}.tcl\".format(node.name))\n", + " builder.set_ipgen_path(code_gen_dir + \"/project_{}\".format(node.name))\n", + " builder.build(code_gen_dir)\n", + " self.set_nodeattr(\"ipgen_path\", builder.ipgen_path)\n", + "\n", + " def code_generation_npysim(self, model):\n", + " \"\"\"Generates c++ code for simulation (npysim).\"\"\"\n", + " node = self.onnx_node\n", + " path = self.get_nodeattr(\"code_gen_dir_npysim\")\n", + " self.generate_params(model, path)\n", " self.global_includes()\n", - " self.defines()\n", + " self.defines(\"npysim\")\n", " self.read_npy_data()\n", " self.strm_decl()\n", " self.docompute()\n", @@ -383,18 +513,23 @@ " # transform list into long string separated by '\\n'\n", " code_gen_line = \"\\n\".join(self.code_gen_dict[key])\n", " template = template.replace(key, code_gen_line)\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", + " code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n", " f = open(os.path.join(code_gen_dir, \"execute_{}.cpp\".format(node.op_type)), \"w\")\n", " f.write(template)\n", " f.close()\n", + " 
self.code_gen_dict.clear()\n", "\n", " def compile_singlenode_code(self):\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", + " \"\"\"Builds the bash script for compilation using the CppBuilder from\n", + " finn.util.basic and executes the script to produce the executable.\"\"\"\n", + " code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n", " builder = CppBuilder()\n", + " # to enable additional debug features please uncommand the next line\n", + " # builder.append_includes(\"-DDEBUG\")\n", " builder.append_includes(\"-I/workspace/finn/src/finn/data/cpp\")\n", " builder.append_includes(\"-I/workspace/cnpy/\")\n", " builder.append_includes(\"-I/workspace/finn-hlslib\")\n", - " builder.append_includes(\"-I/workspace/vivado-hlslib\")\n", + " builder.append_includes(\"-I{}/include\".format(os.environ[\"VIVADO_PATH\"]))\n", " builder.append_includes(\"--std=c++11\")\n", " builder.append_sources(code_gen_dir + \"/*.cpp\")\n", " builder.append_sources(\"/workspace/cnpy/cnpy.cpp\")\n", @@ -404,12 +539,15 @@ " self.set_nodeattr(\"executable_path\", builder.executable_path)\n", "\n", " def dynamic_input_to_npy(self, context, count):\n", + " \"\"\"Saves input (given context) into .npy files. 
\n", + " \n", + " Count indicates the number of inputs that have to be saved.\"\"\"\n", " node = self.onnx_node\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", + " code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n", " if code_gen_dir == \"\":\n", " raise Exception(\n", " \"\"\"\n", - "Found no codegen dir for this node, did you run the codegen transformation?\n", + "Found no codegen dir for this node, did you run the codegen_npysim transformation?\n", " \"\"\"\n", " )\n", " # create a npy file for each input of the node (in_ind is input index)\n", @@ -422,14 +560,16 @@ " )\n", "\n", " def npy_to_dynamic_output(self, context):\n", + " \"\"\"Reads the output from a .npy file and saves it at the right place in \n", + " the context dictionary.\"\"\"\n", " # TODO support multi-output nodes as needed\n", " node = self.onnx_node\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", + " code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n", " output = np.load(\"{}/output.npy\".format(code_gen_dir))\n", " context[node.output[0]] = output\n", "\n", " def exec_precompiled_singlenode_model(self):\n", - " # execute precompiled executable\n", + " \"\"\"Executes precompiled executable.\"\"\"\n", " executable_path = self.get_nodeattr(\"executable_path\")\n", " if executable_path == \"\":\n", " raise Exception(\n", @@ -441,44 +581,194 @@ " process_execute = subprocess.Popen(executable_path, stdout=subprocess.PIPE)\n", " process_execute.communicate()\n", "\n", + " def reset_rtlsim(self, sim):\n", + " \"\"\"Sets reset input in pyverilator to zero, toggles the clock and set it\n", + " back to one\"\"\"\n", + " sim.io.ap_rst_n = 0\n", + " sim.io.ap_clk = 1\n", + " sim.io.ap_clk = 0\n", + " sim.io.ap_rst_n = 1\n", + "\n", + " def toggle_clk(self, sim):\n", + " \"\"\"Toggles the clock input in pyverilator once.\"\"\"\n", + " sim.io.ap_clk = 1\n", + " sim.io.ap_clk = 0\n", + "\n", + " def rtlsim(self, sim, inp):\n", + " \"\"\"Runs the pyverilator 
simulation by passing the input values to the simulation,\n", + " toggle the clock and observing the execution time. Function contains also an \n", + " observation loop that can abort the simulation if no output value is produced \n", + " after 100 cycles.\"\"\"\n", + " \n", + " trace_file = self.get_nodeattr(\"rtlsim_trace\")\n", + " if trace_file != \"\":\n", + " if trace_file == \"default\":\n", + " trace_file = self.onnx_node.name + \".vcd\"\n", + " sim.start_vcd_trace(trace_file)\n", + " inputs = inp\n", + " outputs = []\n", + " sim.io.out_V_V_TREADY = 1\n", + "\n", + " # observe if output is completely calculated\n", + " # observation_count will contain the number of cycles the calculation ran\n", + " num_out_values = self.get_number_output_values()\n", + " output_observed = False\n", + " observation_count = 0\n", + "\n", + " # avoid infinite looping of simulation by aborting when there is no change in\n", + " # output values after 100 cycles\n", + " no_change_count = 0\n", + " old_outputs = outputs\n", + " liveness_threshold = pyverilate_get_liveness_threshold_cycles()\n", + "\n", + " while not (output_observed):\n", + " sim.io.in0_V_V_TVALID = 1 if len(inputs) > 0 else 0\n", + " sim.io.in0_V_V_TDATA = inputs[0] if len(inputs) > 0 else 0\n", + " if sim.io.in0_V_V_TREADY == 1 and sim.io.in0_V_V_TVALID == 1:\n", + " inputs = inputs[1:]\n", + " if sim.io.out_V_V_TVALID == 1 and sim.io.out_V_V_TREADY == 1:\n", + " outputs = outputs + [sim.io.out_V_V_TDATA]\n", + " sim.io.ap_clk = 1\n", + " sim.io.ap_clk = 0\n", + "\n", + " observation_count = observation_count + 1\n", + " no_change_count = no_change_count + 1\n", + "\n", + " if len(outputs) == num_out_values:\n", + " self.set_nodeattr(\"sim_cycles\", observation_count)\n", + " output_observed = True\n", + "\n", + " if no_change_count == liveness_threshold:\n", + " if old_outputs == outputs:\n", + " if trace_file != \"\":\n", + " sim.flush_vcd_trace()\n", + " sim.stop_vcd_trace()\n", + " raise Exception(\n", + " 
\"Error in simulation! Takes too long to produce output. \"\n", + " \"Consider setting the LIVENESS_THRESHOLD env.var. to a \"\n", + " \"larger value.\"\n", + " )\n", + " else:\n", + " no_change_count = 0\n", + " old_outputs = outputs\n", + " if trace_file != \"\":\n", + " sim.flush_vcd_trace()\n", + " sim.stop_vcd_trace()\n", + " return outputs\n", + "\n", " def execute_node(self, context, graph):\n", - " # save input(s)\n", - " self.dynamic_input_to_npy(context, 1)\n", - " # execute the precompiled model\n", - " self.exec_precompiled_singlenode_model()\n", - " # load output npy file\n", - " self.npy_to_dynamic_output(context)\n", - "\n", - " def generate_params(self, model):\n", + " \"\"\"Executes single node using npysim or rtlsim.\"\"\"\n", + " mode = self.get_nodeattr(\"exec_mode\")\n", + " if mode == \"npysim\":\n", + " # save input(s)\n", + " self.dynamic_input_to_npy(context, 1)\n", + " # execute the precompiled model\n", + " self.exec_precompiled_singlenode_model()\n", + " # load output npy file\n", + " self.npy_to_dynamic_output(context)\n", + " elif mode == \"rtlsim\":\n", + " pass\n", + "\n", + " else:\n", + " raise Exception(\n", + " \"\"\"Invalid value for attribute exec_mode! Is currently set to: {}\n", + " has to be set to one of the following value (\"npysim\", \"rtlsim\")\"\"\".format(\n", + " mode\n", + " )\n", + " )\n", + "\n", + " def generate_params(self, model, path):\n", + " \"\"\"Function to generate parameters (i.e. 
weights and thresholds), \n", + " is member function of HLSCustomOp class but has to be filled \n", + " by every node.\"\"\"\n", + " pass\n", + "\n", + " @abstractmethod\n", + " def get_number_output_values(self):\n", + " \"\"\"Function to get the number of expected output values, \n", + " is member function of HLSCustomOp class but has to be filled \n", + " by every node.\"\"\"\n", " pass\n", "\n", " @abstractmethod\n", " def global_includes(self):\n", + " \"\"\"Function to set the global includes for c++ code that has to be generated\n", + " for npysim or rtlsim, is member function of HLSCustomOp class but has to \n", + " be filled by every node.\"\"\"\n", " pass\n", "\n", " @abstractmethod\n", - " def defines(self):\n", + " def defines(self, var):\n", + " \"\"\"Function to set the define commands for c++ code that has to be generated\n", + " for npysim or rtlsim, is member function of HLSCustomOp class but has to \n", + " be filled by every node.\n", + " \n", + " var: makes it possible to reuse the function for different c++ code generation.\n", + " I.e. 
if set to \"ipgen\" in StreamingFCLayer_Batch additional PRAGMA defines are\n", + " added.\"\"\"\n", " pass\n", "\n", " @abstractmethod\n", " def read_npy_data(self):\n", + " \"\"\"Function to generate the commands for reading data from .npy file in c++, \n", + " is member function of HLSCustomOp class but has to be filled by every node.\"\"\"\n", " pass\n", "\n", " @abstractmethod\n", " def strm_decl(self):\n", + " \"\"\"Function to generate the commands for the stream declaration in c++,\n", + " is member function of HLSCustomOp class but has to be filled\n", + " by every node.\"\"\"\n", " pass\n", "\n", " @abstractmethod\n", " def docompute(self):\n", + " \"\"\"Function to generate the commands for the computational part of the \n", + " c++ code, is member function of HLSCustomOp class but has to be filled\n", + " by every node.\"\"\"\n", " pass\n", "\n", " @abstractmethod\n", " def dataoutstrm(self):\n", + " \"\"\"Function to generate the commands for reading out data from c++ and convert \n", + " into npy format, is member function of HLSCustomOp class but has to be filled \n", + " by every node.\"\"\"\n", " pass\n", "\n", " @abstractmethod\n", " def save_as_npy(self):\n", + " \"\"\"Function to generate the commands for saving data in .npy file in c++,\n", + " is member function of HLSCustomOp class but has to be filled by every node.\"\"\"\n", + " pass\n", + "\n", + " @abstractmethod\n", + " def blackboxfunction(self):\n", + " \"\"\"Function to generate a blackbock function in c++ from which an IP block \n", + " will be generated, is member function of HLSCustomOp class but has to be filled \n", + " by every node.\"\"\"\n", + " pass\n", + "\n", + " @abstractmethod\n", + " def pragmas(self):\n", + " \"\"\"Function to generate the pragma commands in c++, is member function of \n", + " HLSCustomOp class but has to be filled by every node.\"\"\"\n", " pass\n", + "\n", + " def get_folded_input_shape(self):\n", + " \"\"\"Returns folded input shape (according to 
synapse folding), if implemented.\"\"\"\n", + " raise Exception(\"get_folded_input_shape not implemented for this op\")\n", + "\n", + " def get_folded_output_shape(self):\n", + " \"\"\"Returns folded output shape (according to neuron folding), if implemented.\"\"\"\n", + " raise Exception(\"get_folded_output_shape not implemented for this op\")\n", + "\n", + " def get_instream_width(self):\n", + " \"\"\"Returns input stream width, if implemented.\"\"\"\n", + " raise Exception(\"get_instream_width not implemented for this op\")\n", + "\n", + " def get_outstream_width(self):\n", + " \"\"\"Returns output stream width, if implemented.\"\"\"\n", + " raise Exception(\"get_outstream_width not implemented for this op\")\n", "\n" ] } @@ -555,7 +845,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": { "scrolled": true }, @@ -564,7 +854,9 @@ "name": "stdout", "output_type": "stream", "text": [ - " def generate_params(self, model):\n", + " def generate_params(self, model, path):\n", + " \"\"\"Saves weights into params.h and if existing thresholds into thresh.h.\"\"\"\n", + " code_gen_dir = path\n", " # weights\n", " weights = model.get_initializer(self.onnx_node.input[1])\n", " # convert weights into hlslib-compatible format\n", @@ -578,7 +870,7 @@ " weight_tensor, export_wdt, \"weights\", True, True\n", " )\n", " # write weights into params.h\n", - " code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", + " # code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n", " f_weights = open(\"{}/params.h\".format(code_gen_dir), \"w\")\n", "\n", " if export_wdt.bitwidth() != 1:\n", @@ -598,6 +890,7 @@ " )\n", " f_weights.write(weight_hls_code)\n", " f_weights.close()\n", + "\n", " # thresholds\n", " if len(self.onnx_node.input) > 2:\n", " thresholds = model.get_initializer(self.onnx_node.input[2])\n", @@ -619,7 +912,7 @@ " threshold_tensor, tdt, \"thresholds\", False, True\n", " )\n", " # write thresholds into thresh.h\n", - " 
code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n", + " # code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n", " f_thresh = open(\"{}/thresh.h\".format(code_gen_dir), \"w\")\n", " tdt_hls = tdt.get_hls_datatype_str()\n", " # use binary to export bipolar activations\n", diff --git a/notebooks/internals/3_verify_hls_custom_op.ipynb b/notebooks/internals/3_verify_hls_custom_op.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..5c3b43cd03d45be03a6c853a19169fbcc5c5acbf --- /dev/null +++ b/notebooks/internals/3_verify_hls_custom_op.ipynb @@ -0,0 +1,569 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# FINN - Verification of an HLSCustomOp node\n", + "-----------------------------------------------------------------\n", + "This notebook is about the verification flow and options for FINN custom operation nodes. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Outline\n", + "-------------\n", + "* Example model (sliding window function)\n", + "* c++ high level simulation\n", + "* Vivado IP synthesis and pyverilator execution flow" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Example model\n", + "To show the possibilities of how to verify a FINN HLSCustomOp node, an example model with the [sliding window function](https://finn-hlslib.readthedocs.io/en/latest/library/swg.html) of the finn-hlslib is used. For that a corresponding ONNX node is created. The ONNX node contains all the template parameters of the corresponding finn-hlslib function as attributes. The function is shown below." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the next step the individual parameters are defined. At first the class 'DataType' is imported from FINN to be able to use data types like bipolar. 
With the member function `bitwidth()` the parameter `Input_precision` can be derived directly from this data type. The other parameters are set to reasonable values. The output dimension can be calculated using the input dimension, the kernel size and the value for stride." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.core.datatype import DataType\n", + "idt = DataType.BIPOLAR # input data type\n", + "ip = idt.bitwidth() # input precision\n", + "k = 2 # kernel size\n", + "ifm_dim = 4 # input dimension\n", + "ifm_ch = 1 # input channels\n", + "stride = 2 # stride\n", + "simd = ifm_ch # simd\n", + "\n", + "# output dimension\n", + "ofm_dim = int(((ifm_dim - k) / stride) + 1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "An additional variable is defined to be able to infer the shape of the output tensor. Furthermore the output data type is set to the same value as the input data type." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "out_pix = ofm_dim * ofm_dim\n", + "odt = idt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To create an ONNX node, first TensorProto and helper are imported from ONNX. These can be used to create tensors, nodes, graphs and models in ONNX. After importing, the input and output tensors can be created." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "from onnx import TensorProto, helper\n", + "\n", + "inp = helper.make_tensor_value_info(\n", + " \"inp\", TensorProto.FLOAT, [1, ifm_ch, ifm_dim, ifm_dim]\n", + ")\n", + "outp = helper.make_tensor_value_info(\n", + " \"outp\", TensorProto.FLOAT, [1, out_pix, k * k * ifm_ch]\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now the node can be built. 
This node is directly integrated into a graph environment and from this the ONNX model is created. For more information about the creation and manipulation of an ONNX model, please refer to jupyter notebook [FINN-HowToWorkWithONNX](FINN-HowToWorkWithONNX.ipynb)." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "SlidingWindow_node = helper.make_node(\n", + " \"ConvolutionInputGenerator\",\n", + " [\"inp\"],\n", + " [\"outp\"],\n", + " domain=\"finn\",\n", + " backend=\"fpgadataflow\",\n", + " ConvKernelDim=k,\n", + " IFMChannels=ifm_ch,\n", + " Input_precision=ip,\n", + " IFMDim=ifm_dim,\n", + " OFMDim=ofm_dim,\n", + " SIMD=simd,\n", + " Stride=stride,\n", + " inputDataType=idt.name,\n", + " outputDataType=odt.name,\n", + " )\n", + "graph = helper.make_graph(\n", + " nodes=[SlidingWindow_node],\n", + " name=\"slidingwindow_graph\",\n", + " inputs=[inp],\n", + " outputs=[outp],\n", + " )\n", + "\n", + "model = helper.make_model(graph, producer_name=\"slidingwindow-model\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "FINN provides a thin wrapper around the ONNX model with a lot of helper functions that can be used by importing the class `ModelWrapper`. More information about `ModelWrapper` can be found in Jupyter notebook [FINN-ModelWrapper](FINN-ModelWrapper.ipynb). Here it is used to assign FINN data types to the tensors." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.core.modelwrapper import ModelWrapper\n", + "\n", + "model = ModelWrapper(model)\n", + "\n", + "model.set_tensor_datatype(\"inp\", idt)\n", + "model.set_tensor_datatype(\"outp\", odt)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "What the model looks like can be visualized with netron. Netron is a visualizer for neural network, deep learning and machine learning models. For this the model is first saved." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "model.save(\"original_model.onnx\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Serving 'original_model.onnx' at http://0.0.0.0:8081\n" + ] + } + ], + "source": [ + "import netron\n", + "netron.start('original_model.onnx', port=8081, host=\"0.0.0.0\")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%%html\n", + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we have the model, we can use various features of FINN to manipulate it. The basic principle of FINN is that there are transformation and analysis passes that can be applied to a model. A transformation pass changes a given model and returns the changed model. An analysis pass traverses the graph structure and produces information about certain properties. It returns a dictionary of named properties.\n", + "\n", + "The following section describes the transformation passes that can be used to verify an HLSCustomOp node. Firstly the verification with a c++ high level simulation is shown and afterwards with a Vivado IP synthesis and pyverilator execution flow." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### c++ high level simulation\n", + "\n", + "First, an additional attribute must be set to specify which of the two verification types should be used when executing the node. This is done with the transformation pass `SetExecMode`, to which the desired mode is passed. After that the transformation pass `CodeGen_npysim` can be applied. With this transformation c++ code is generated and stored in a temporary directory. In addition, a further attribute is set, which contains the path to this directory." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode\n", + "from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim\n", + "\n", + "model = model.transform(SetExecMode(\"npysim\"))\n", + "model = model.transform(CodeGen_npysim())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If you now save the model again and display it, these changes can be seen." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving 'modified_model.onnx' at http://0.0.0.0:8081\n" + ] + } + ], + "source": [ + "model.save(\"modified_model.onnx\")\n", + "netron.start('modified_model.onnx', port=8081, host=\"0.0.0.0\")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%%html\n", + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The next step is to create the executable from the .cpp file using the `Compile` transformation. The path to the executable is also stored in a new attribute." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.fpgadataflow.compile import Compile\n", + "model = model.transform(Compile())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "All required files are now available and we can execute the node. This is done with the `execute_onnx` function, which gets the model and an input dictionary. That means we have to create an input tensor first. For this we use a numpy array." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[[-1. -1. 1. 1.]\n", + " [-1. -1. -1. -1.]\n", + " [ 1. -1. 1. -1.]\n", + " [ 1. 1. 1. 
-1.]]]]\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "x = np.asarray([-1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, -1], dtype=np.float32).reshape(1, ifm_ch, ifm_dim, ifm_dim)\n", + "print(x)\n", + "input_dict = {\"inp\": (x + 1) /2}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To be able to use `execute_onnx()` `onnx_exec` must be imported. Inside `execute_onnx()` the attribute `exec_mode` is read and if \"npysim\" is selected, the input array is saved in a .npy file and the previously created executable is executed. The output is saved in another .npy file and is read by `execute_onnx()` and saved as output." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[-1. -1. -1. -1.]\n", + " [ 1. 1. -1. -1.]\n", + " [ 1. -1. 1. 1.]\n", + " [ 1. -1. 1. -1.]]]\n" + ] + } + ], + "source": [ + "import finn.core.onnx_exec as oxe\n", + "y_npysim = oxe.execute_onnx(model, input_dict)[\"outp\"]\n", + "print(y_npysim)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A different transformation flow can be used for verification. This will be discussed in the next section." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Vivado IP synthesis and pyverilator execution flow\n", + "\n", + "In this verification a .cpp code is generated from the node, which is synthesized to an IP block using Vivado. Afterwards the functionality can be simulated with [pyverilator](https://github.com/maltanar/pyverilator). Pyverilator is a tool which makes it possible to simulate verilog files using verilator via a python interface." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the first step `exec_mode` must be set to \"rtlsim\" in order to select the corresponding functionality when executing the node. 
In addition, the nodes in the model are assigned unique names using the `GiveUniqueNodeNames()` transformation. Then the transformation `CodeGen_ipgen()` can be executed. Two arguments are passed to this transformation, one is an fpga part and the other is a value for the clock." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "from finn.transformation.general import GiveUniqueNodeNames\n", + "from finn.transformation.fpgadataflow.codegen_ipgen import CodeGen_ipgen\n", + "model = model.transform(SetExecMode(\"rtlsim\"))\n", + "model = model.transform(GiveUniqueNodeNames())\n", + "model = model.transform(CodeGen_ipgen(\"xc7z020clg400-1\", 5))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "During the transformation a new attribute with the temporary directory is set, in which the .cpp and a .tcl script are stored, with which the synthesis can be started. This can be seen in the following using netron." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Stopping http://0.0.0.0:8081\n", + "Serving 'modified_model.onnx' at http://0.0.0.0:8081\n" + ] + } + ], + "source": [ + "model.save(\"modified_model.onnx\")\n", + "netron.start('modified_model.onnx', port=8081, host=\"0.0.0.0\")" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%%html\n", + "<iframe src=\"http://0.0.0.0:8081/\" style=\"position: relative; width: 100%;\" height=\"400\"></iframe>" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The next step is to perform the synthesis using the `HLSSynth_IPGen()` transformation and set another attribute with the project directory, which contains the IP block. \n", + "\n", + "So that the execution can run without errors, two env variables must be set inside the jupyter notebook." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: PWD=/workspace/finn/notebooks\n" + ] + } + ], + "source": [ + "# env variable has to be set because it is used inside the trafo\n", + "%env PWD=/workspace/finn/notebooks\n", + "\n", + "from finn.transformation.fpgadataflow.hlssynth_ipgen import HLSSynth_IPGen\n", + "model = model.transform(HLSSynth_IPGen())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now the execution can run again and pyverilator is used in the background to simulate the generated verilog files." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[[-1., -1., -1., -1.],\n", + " [ 1., 1., -1., -1.],\n", + " [ 1., -1., 1., 1.],\n", + " [ 1., -1., 1., -1.]]], dtype=float32)" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "y_rtlsim = oxe.execute_onnx(model, input_dict)[\"outp\"]\n", + "y_rtlsim" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the last step it can be checked whether the two results from the simulations match." + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "assert (y_npysim == y_rtlsim).all()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/internals/im2col_finnhlslib.PNG b/notebooks/internals/im2col_finnhlslib.PNG new file mode 100755 index 0000000000000000000000000000000000000000..4df7c3041426576fe5b422aba345a52b3d3ab51c Binary files /dev/null and b/notebooks/internals/im2col_finnhlslib.PNG differ diff --git a/requirements.txt b/requirements.txt index daefe6b51c395e2707d63246ccba62d40ad34fd7..a19b03d2179bfcdb73d04f9b71b0317de29575ee 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,3 +8,4 @@ pre-commit scipy sphinx wget +pyverilator diff --git a/run-docker.sh b/run-docker.sh index c025ab2a2cf935916bf22d68d1c3f08fc58c30be..6883f13c727b89f5370b3458be1d574fd2d6701a 100755 --- a/run-docker.sh +++ b/run-docker.sh @@ -1,8 +1,39 @@ #!/bin/bash +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. if [ -z "$VIVADO_PATH" ];then - echo "For correct implementation please set an environment variable VIVADO_PATH that contains the path to your vivado installation directory" - exit 1 + echo "For correct implementation please set an environment variable VIVADO_PATH that contains the path to your vivado installation directory" + exit 1 +fi + +if [ -z "$PYNQ_IP" ];then + echo "Please set the PYNQ_IP env.var. to enable PYNQ deployment tests." 
fi DOCKER_GID=$(id -g) @@ -10,7 +41,24 @@ DOCKER_GNAME=$(id -gn) DOCKER_UNAME=$(id -un) DOCKER_UID=$(id -u) DOCKER_PASSWD="finn" -DOCKER_TAG="finn_$DOCKER_UNAME" +# generate a random number per-run to allow multiple +# containers from the same user +DOCKER_RND=$(shuf -i0-32768 -n1) +DOCKER_TAG="finn_${DOCKER_UNAME}" +# uncomment to run multiple instances with different names +# DOCKER_INST_NAME="finn_${DOCKER_UNAME}_${DOCKER_RND}" +DOCKER_INST_NAME="finn_${DOCKER_UNAME}" +# ensure Docker tag and inst. name are all lowercase +DOCKER_TAG=$(echo "$DOCKER_TAG" | tr '[:upper:]' '[:lower:]') +DOCKER_INST_NAME=$(echo "$DOCKER_INST_NAME" | tr '[:upper:]' '[:lower:]') +# the settings below will be taken from environment variables if available, +# otherwise the defaults below will be used +: ${JUPYTER_PORT=8888} +: ${NETRON_PORT=8081} +: ${PYNQ_USERNAME="xilinx"} +: ${PYNQ_PASSWORD="xilinx"} +: ${PYNQ_BOARD="Pynq-Z1"} +: ${PYNQ_TARGET_DIR="/home/xilinx/$DOCKER_INST_NAME"} # Absolute path to this script, e.g. 
/home/user/bin/foo.sh SCRIPT=$(readlink -f "$0") @@ -21,35 +69,55 @@ BREVITAS_REPO=https://github.com/Xilinx/brevitas.git EXAMPLES_REPO=https://github.com/maltanar/brevitas_cnv_lfc.git CNPY_REPO=https://github.com/rogersce/cnpy.git FINN_HLS_REPO=https://github.com/Xilinx/finn-hlslib.git +PYVERILATOR_REPO=https://github.com/maltanar/pyverilator +PYNQSHELL_REPO=https://github.com/maltanar/PYNQ-HelloWorld.git BREVITAS_LOCAL=$SCRIPTPATH/brevitas EXAMPLES_LOCAL=$SCRIPTPATH/brevitas_cnv_lfc CNPY_LOCAL=$SCRIPTPATH/cnpy FINN_HLS_LOCAL=$SCRIPTPATH/finn-hlslib -VIVADO_HLS_LOCAL=$VIVADO_PATH/include +PYVERILATOR_LOCAL=$SCRIPTPATH/pyverilator +PYNQSHELL_LOCAL=$SCRIPTPATH/PYNQ-HelloWorld +BUILD_LOCAL=/tmp/$DOCKER_INST_NAME +VIVADO_HLS_LOCAL=$VIVADO_PATH +VIVADO_IP_CACHE=$BUILD_LOCAL/vivado_ip_cache # clone dependency repos git clone --branch feature/finn_onnx_export $BREVITAS_REPO $BREVITAS_LOCAL || git -C "$BREVITAS_LOCAL" pull git clone $EXAMPLES_REPO $EXAMPLES_LOCAL || git -C "$EXAMPLES_LOCAL" pull git clone $CNPY_REPO $CNPY_LOCAL || git -C "$CNPY_LOCAL" pull git clone $FINN_HLS_REPO $FINN_HLS_LOCAL; git -C "$FINN_HLS_LOCAL" checkout b5dc957a16017b8356a7010144b0a4e2f8cfd124 || git -C "$FINN_HLS_LOCAL" checkout b5dc957a16017b8356a7010144b0a4e2f8cfd124 +git clone $PYVERILATOR_REPO $PYVERILATOR_LOCAL || git -C "$PYVERILATOR_LOCAL" pull +git clone $PYNQSHELL_REPO $PYNQSHELL_LOCAL || git -C "$PYNQSHELL_LOCAL" pull + +# ensure build dir exists locally +mkdir -p $BUILD_LOCAL +mkdir -p $VIVADO_IP_CACHE +echo "Instance is named as $DOCKER_INST_NAME" echo "Mounting $SCRIPTPATH into /workspace/finn" echo "Mounting $SCRIPTPATH/brevitas into /workspace/brevitas" echo "Mounting $SCRIPTPATH/brevitas_cnv_lfc into /workspace/brevitas_cnv_lfc" echo "Mounting $SCRIPTPATH/cnpy into /workspace/cnpy" echo "Mounting $SCRIPTPATH/finn-hlslib into /workspace/finn-hlslib" -echo "Mounting $VIVADO_PATH/include into /workspace/vivado-hlslib" +echo "Mounting $SCRIPTPATH/pyverilator into 
/workspace/pyverilator" +echo "Mounting $SCRIPTPATH/PYNQ-HelloWorld into /workspace/PYNQ-HelloWorld" +echo "Mounting $BUILD_LOCAL into $BUILD_LOCAL" +echo "Mounting $VIVADO_PATH into $VIVADO_PATH" +echo "Port-forwarding for Jupyter $JUPYTER_PORT:$JUPYTER_PORT" +echo "Port-forwarding for Netron $NETRON_PORT:$NETRON_PORT" +echo "Vivado IP cache dir is at $VIVADO_IP_CACHE" +echo "Using default PYNQ board $PYNQ_BOARD" if [ "$1" = "test" ]; then - echo "Running test suite" - DOCKER_CMD="python setup.py test" + echo "Running test suite" + DOCKER_CMD="python setup.py test" elif [ "$1" = "notebook" ]; then - echo "Running Jupyter notebook server" - DOCKER_CMD="jupyter notebook --ip=0.0.0.0 notebooks" + echo "Running Jupyter notebook server" + DOCKER_CMD="source ~/.bashrc; jupyter notebook --ip=0.0.0.0 --port $JUPYTER_PORT notebooks" else - echo "Running container only" - DOCKER_CMD="bash" + echo "Running container only" + DOCKER_CMD="bash" fi # Build the FINN Docker image @@ -59,14 +127,32 @@ docker build --tag=$DOCKER_TAG \ --build-arg UNAME=$DOCKER_UNAME \ --build-arg UID=$DOCKER_UID \ --build-arg PASSWD=$DOCKER_PASSWD \ + --build-arg JUPYTER_PORT=$JUPYTER_PORT \ + --build-arg NETRON_PORT=$NETRON_PORT \ . 
# Launch container with current directory mounted -docker run --rm --name finn_dev -it \ +docker run -t --rm --name $DOCKER_INST_NAME -it \ +--hostname $DOCKER_INST_NAME \ +-e "XILINX_VIVADO=$VIVADO_PATH" \ +-e "SHELL=/bin/bash" \ -v $SCRIPTPATH:/workspace/finn \ -v $SCRIPTPATH/brevitas:/workspace/brevitas \ -v $SCRIPTPATH/brevitas_cnv_lfc:/workspace/brevitas_cnv_lfc \ -v $SCRIPTPATH/cnpy:/workspace/cnpy \ -v $SCRIPTPATH/finn-hlslib:/workspace/finn-hlslib \ --v $VIVADO_PATH/include:/workspace/vivado-hlslib \ --p 8888:8888 -p 8081:8081 \ -$DOCKER_TAG $DOCKER_CMD +-v $SCRIPTPATH/pyverilator:/workspace/pyverilator \ +-v $SCRIPTPATH/PYNQ-HelloWorld:/workspace/PYNQ-HelloWorld \ +-v $BUILD_LOCAL:$BUILD_LOCAL \ +-v $VIVADO_PATH:$VIVADO_PATH \ +-e VIVADO_PATH=$VIVADO_PATH \ +-e FINN_INST_NAME=$DOCKER_INST_NAME \ +-e FINN_ROOT="/workspace/finn" \ +-e VIVADO_IP_CACHE="$VIVADO_IP_CACHE" \ +-e PYNQ_BOARD=$PYNQ_BOARD \ +-e PYNQ_IP=$PYNQ_IP \ +-e PYNQ_USERNAME=$PYNQ_USERNAME \ +-e PYNQ_PASSWORD=$PYNQ_PASSWORD \ +-e PYNQ_TARGET_DIR=$PYNQ_TARGET_DIR \ +-p $JUPYTER_PORT:$JUPYTER_PORT \ +-p $NETRON_PORT:$NETRON_PORT \ +$DOCKER_TAG bash -c "$DOCKER_CMD" diff --git a/setup.cfg b/setup.cfg index 65b213d5b7647a8a3522f64122e1d3d1f54b1222..0a54c3b3682c8512e456623954ef2e9fc7813128 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # This file is used to configure your project. # Read more about the various options under: # http://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files @@ -88,7 +116,7 @@ dists = bdist_wheel universal = 1 [build_sphinx] -source_dir = docs +source_dir = docs/finn build_dir = build/sphinx [devpi:upload] diff --git a/setup.py b/setup.py index 13817315c346f2b40117f4598a4ddd806bbc4f9c..d7e158b56010fbc9ba2fb9f143ea2fc8d8a901d9 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # -*- coding: utf-8 -*- """ Setup file for finn. diff --git a/src/__init__.py b/src/__init__.py index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..83c8e8bed70797f7d6c0138968f750f72e790386 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -0,0 +1,27 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/src/finn/__init__.py b/src/finn/__init__.py index cd044ed324377d4d0bb5ff0cce0a375537af1eb5..76b4dacddb18404218dcb842bddf79b5f72eeb8e 100644 --- a/src/finn/__init__.py +++ b/src/finn/__init__.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # -*- coding: utf-8 -*- from pkg_resources import get_distribution, DistributionNotFound diff --git a/src/finn/analysis/__init__.py b/src/finn/analysis/__init__.py index 18e1efb37e9be82de26d933cccaf64a85bc8ff22..c3f810e6f658c248d3aee26a4e174baa5cf44ce5 100644 --- a/src/finn/analysis/__init__.py +++ b/src/finn/analysis/__init__.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + """ How to write an analysis pass for FINN -------------------------------------- diff --git a/src/finn/backend/fpgadataflow/__init__.py b/src/finn/analysis/fpgadataflow/__init__.py similarity index 100% rename from src/finn/backend/fpgadataflow/__init__.py rename to src/finn/analysis/fpgadataflow/__init__.py diff --git a/src/finn/analysis/fpgadataflow/hls_synth_res_estimation.py b/src/finn/analysis/fpgadataflow/hls_synth_res_estimation.py new file mode 100644 index 0000000000000000000000000000000000000000..0334c316b80a5c0628d00b75eb40776436cb8434 --- /dev/null +++ b/src/finn/analysis/fpgadataflow/hls_synth_res_estimation.py @@ -0,0 +1,77 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os +import xml.etree.ElementTree as ET + +import finn.custom_op.registry as registry +import finn.util.basic as util + + +def hls_synth_res_estimation(model): + """Extracts the results from the vivado synthesis. 
+ + Returns {node name : resource estimation}.""" + + res_dict = {} + for node in model.graph.node: + if node.domain == "finn": + backend_attribute = util.get_by_name(node.attribute, "backend") + if backend_attribute is None: + continue + backend_value = backend_attribute.s.decode("UTF-8") + if backend_value == "fpgadataflow": + op_type = node.op_type + inst = registry.custom_op[op_type](node) + code_gen_dir = inst.get_nodeattr("code_gen_dir_ipgen") + if code_gen_dir == "": + raise Exception( + """Please run "CodeGen_ipgen" transformation and + "HLSSynth_IPGen" first to generate the report files""" + ) + else: + xmlfile = "{}/project_{}/sol1/syn/report/{}_csynth.xml".format( + code_gen_dir, node.name, node.name + ) + + if os.path.isfile(xmlfile): + res_dict[node.name] = [] + tree = ET.parse(xmlfile) + root = tree.getroot() + for item in root.findall("AreaEstimates/Resources"): + for child in item: + res_dict[node.name].append( + ["{} : {}".format(child.tag, child.text)] + ) + else: + raise Exception( + """Please run "HLSSynth_IPGen" first + to generate the report files""" + ) + + return res_dict diff --git a/src/finn/analysis/fpgadataflow/res_estimation.py b/src/finn/analysis/fpgadataflow/res_estimation.py new file mode 100644 index 0000000000000000000000000000000000000000..3585868906fb2c66aef045f49f0da919f933d012 --- /dev/null +++ b/src/finn/analysis/fpgadataflow/res_estimation.py @@ -0,0 +1,50 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import finn.custom_op.registry as registry +import finn.util.basic as util + + +def res_estimation(model): + """Estimates the resources needed for the given model. + + Returns {node name : resource estimation}.""" + + res_dict = {} + for node in model.graph.node: + if node.domain == "finn": + backend_attribute = util.get_by_name(node.attribute, "backend") + if backend_attribute is None: + continue + backend_value = backend_attribute.s.decode("UTF-8") + if backend_value == "fpgadataflow": + op_type = node.op_type + inst = registry.custom_op[op_type](node) + res_dict[node.name] = inst.node_res_estimation() + + return res_dict diff --git a/src/finn/analysis/topology.py b/src/finn/analysis/topology.py index 9150e2b118cc9e17464e5c9866f83005576d40df..c825a221ec178ee89b4e3747c982e59a3005cadd 100644 --- a/src/finn/analysis/topology.py +++ b/src/finn/analysis/topology.py @@ -1,10 +1,40 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np def is_linear(model): """Checks whether the given model graph is linear. This is done by looking at the fan-out of each tensor. All tensors have a fan-out <= 1 in a linear - graph. Returns {"is_linear", Bool}""" + graph. 
+ + Returns {"is_linear": Bool}.""" per_tensor_fanouts = get_per_tensor_fanouts(model) # check for tensors that have fanout > 1 multi_fanouts = list(filter(lambda x: x[1] > 1, per_tensor_fanouts.items())) @@ -12,7 +42,7 @@ def is_linear(model): def get_per_tensor_fanouts(model): - """Returns a dictionary of (tensor_name, tensor_fanout) for the model.""" + """Returns a dictionary of {tensor_name: tensor_fanout} for the model.""" # make execution context to get a list of tensors per_tensor_fanouts = model.make_empty_exec_context() # replace every tensor with its fanout @@ -23,7 +53,9 @@ def get_per_tensor_fanouts(model): def all_tensors_f32(model): """Checks whether all tensors have a float32 dtype, extra quantization - annotations notwithstanding.""" + annotations notwithstanding. + + Returns {"all_tensors_f32": Bool}.""" all_tensors = model.make_empty_exec_context().items() non_f32_tensors = filter(lambda x: x[1].dtype != np.float32, all_tensors) return {"all_tensors_f32": len(list(non_f32_tensors)) == 0} @@ -33,7 +65,9 @@ def node_inputs_in_expected_order(model): """Verifies that the node inputs are ordered in the way that FINN expects them. When a node has a mixture of static (= constant, initialized) inputs and dynamic inputs, the dynamic input should come first, followed by the - static one. Only verifiable for a small subset of op_types for now.""" + static one. Only verifiable for a small subset of op_types for now. + + Returns {"node_inputs_in_expected_order": Bool}.""" op_types = ["MatMul", "Conv", "Add", "Mul"] nodes = filter(lambda x: x.op_type in op_types, model.graph.node) all_OK = True diff --git a/src/finn/analysis/verify_custom_nodes.py b/src/finn/analysis/verify_custom_nodes.py index 73d7ae590ac69226516ee1bf308ca6c2fbb41ce1..0e05022dd0cb72291128259b983513322524b9da 100644 --- a/src/finn/analysis/verify_custom_nodes.py +++ b/src/finn/analysis/verify_custom_nodes.py @@ -1,10 +1,41 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import finn.custom_op.registry as registry def verify_nodes(model): """Checks if custom ops in graph are correctly built, with all attributes - and inputs. Returns {node op_type : info_messages} - *info_messages is list of strings about the result of the verification""" + and inputs. 
+ + Returns {node op_type : info_messages} + + * info_messages: is list of strings about the result of the verification.""" verification_dict = {} for node in model.graph.node: diff --git a/src/finn/backend/fpgadataflow/code_gen.py b/src/finn/backend/fpgadataflow/code_gen.py deleted file mode 100644 index 5e3927c013dd3ca23154e58020015e3eab0105ee..0000000000000000000000000000000000000000 --- a/src/finn/backend/fpgadataflow/code_gen.py +++ /dev/null @@ -1,148 +0,0 @@ -import finn.backend.fpgadataflow.layers as ly - - -def strm_decl(model, code_gen_dict): - num_FIFOs = get_num_of_FIFOs(model) - code_gen_dict["stream_declarations"] = [] - FIFO_ind = 1 - for node in model.graph.node: - if node.op_type == "FIFO": - name = node.name - if FIFO_ind == 1: - code_gen_dict["stream_declarations"].append( - 'hls::stream<ap_uint<L{}_SIMD>> {}("DoCompute.{}");'.format( - FIFO_ind - 1, name, name - ) - ) - # TO DO: check if elif and else path can be summarized - elif FIFO_ind == num_FIFOs: - code_gen_dict["stream_declarations"].append( - 'hls::stream<ap_uint<L{}_PE>> {}("DoCompute.{}");'.format( - FIFO_ind - 2, name, name - ) - ) - else: - code_gen_dict["stream_declarations"].append( - "hls::stream<ap_uint<L{}_PE * (L{}_AP + L{}_APF)>> " - '{}("DoCompute.{}");'.format( - FIFO_ind - 2, FIFO_ind - 2, FIFO_ind - 2, name, name - ) - ) - - FIFO_ind += 1 - - -def get_num_of_FIFOs(model): - i = 0 - for node in model.graph.node: - if node.op_type == "FIFO": - i += 1 - return i - - -def strm_prgm(model, code_gen_dict): - code_gen_dict["stream_pragmas"] = ["#pragma HLS DATAFLOW"] - for node in model.graph.node: - if node.op_type == "FIFO": - name = node.name - # TO DO: FIFOs have only one attribute, at the moment - # if there are more, change here - depth = node.attribute[0].i - code_gen_dict["stream_pragmas"].append( - "#pragma HLS stream depth={} variable={}".format(depth, name) - ) - - -def computation_cmds(model, all_strmfcl, code_gen_dict): - code_gen_dict["compute"] = [] - for i in 
range(len(all_strmfcl)): - consumer = model.find_consumer(all_strmfcl[i].output) - output_name = consumer.output[0] - code_gen_dict["compute"].append( - "{}<L{}_MW, L{}_MH, L{}_SIMD, L{}_PE, {}> " - "({}, {}, {}, {}, numReps, {});".format( - all_strmfcl[i].op_type, - i, - i, - i, - i, - all_strmfcl[i].resDataType, - all_strmfcl[i].input, - output_name, - all_strmfcl[i].weights, - all_strmfcl[i].thresholds, - all_strmfcl[i].resType, - ) - ) - - -def config_cmds(model, code_gen_dict): - all_strmfcl = [] - code_gen_dict["config"] = [] - - # TO DO: Find out values and add them to get_layer_parameters() - WPI = 1 - WPF = 0 - APF = 0 - - i = -1 - for node in model.graph.node: - if node.op_type == "StreamingFCLayer_Batch": - i += 1 - layer = ly.StreamingFCLayer_Batch(node, model) - code_gen_dict["config"].append( - "#define L{}_SIMD {} \n " - "#define L{}_PE {} \n " - "#define L{}_WMEM {} \n " - "#define L{}_TMEM {} \n " - "#define L{}_MW {} \n " - "#define L{}_MH {} \n " - "#define L{}_WPI {} \n " - "#define L{}_API {} \n " - "#define L{}_WPF {} \n " - "#define L{}_APF {} \n ".format( - i, - layer.SIMD, - i, - layer.PE, - i, - layer.WMEM, - i, - layer.TMEM, - i, - layer.MW, - i, - layer.MH, - i, - WPI, - i, - layer.API, - i, - WPF, - i, - APF, - ) - ) - all_strmfcl.append(layer) - return all_strmfcl - - -def code_generation(model): - - code_gen_dict = {} - - # config commands - all_strmfcl = config_cmds(model, code_gen_dict) - - # stream declarations - strm_decl(model, code_gen_dict) - - # stream pragmas - strm_prgm(model, code_gen_dict) - - # computation commands - computation_cmds(model, all_strmfcl, code_gen_dict) - - # print(code_gen_dict) - - return code_gen_dict diff --git a/src/finn/backend/fpgadataflow/layers.py b/src/finn/backend/fpgadataflow/layers.py deleted file mode 100644 index 7bd9adc6c5866278f21ddfc66825e6f20c4594c5..0000000000000000000000000000000000000000 --- a/src/finn/backend/fpgadataflow/layers.py +++ /dev/null @@ -1,70 +0,0 @@ -class 
StreamingFCLayer_Batch: - def __init__(self, node, model): - self.op_type = "StreamingFCLayer_Batch" - # Layer attributes - num_attr = len(node.attribute) - for k in range(num_attr): - if node.attribute[k].name == "PE": - self.PE = node.attribute[k].i - if node.attribute[k].name == "SIMD": - self.SIMD = node.attribute[k].i - if node.attribute[k].name == "MH": - self.MH = node.attribute[k].i - if node.attribute[k].name == "MW": - self.MW = node.attribute[k].i - if node.attribute[k].name == "resDataType": - self.resDataType = node.attribute[k].s.decode("utf-8") - if node.attribute[k].name == "resType": - self.resType = node.attribute[k].s.decode("utf-8") - - # get input and output names - self.input = node.input[0] - self.weights = node.input[1] - self.thresholds = node.input[2] - self.output = node.output[0] - - # get other parameters - weights_shape = model.get_tensor_shape(self.weights) - thresholds_shape = model.get_tensor_shape(self.thresholds) - self.WMEM = weights_shape[2] - self.TMEM = thresholds_shape[0] - self.API = thresholds_shape[2] - - def get_PE(self): - return self.PE - - def get_SIMD(self): - return self.SIMD - - def get_MH(self): - return self.MH - - def get_MW(self): - return self.MW - - def get_resDataType(self): - return self.resDataType - - def get_resType(self): - return self.resType - - def get_WMEM(self): - return self.WMEM - - def get_TMEM(self): - return self.TMEM - - def get_API(self): - return self.API - - def get_input_name(self): - return self.input - - def get_weights_name(self): - return self.weights - - def get_thresholds_name(self): - return self.thresholds - - def get_output_name(self): - return self.output diff --git a/src/finn/backend/fpgadataflow/utils.py b/src/finn/backend/fpgadataflow/utils.py deleted file mode 100644 index 0f3049ec70050657c4a648fe8b51a2d16691bed0..0000000000000000000000000000000000000000 --- a/src/finn/backend/fpgadataflow/utils.py +++ /dev/null @@ -1,58 +0,0 @@ -import sys - -import numpy as np - -from 
finn.core.datatype import DataType -from finn.core.utils import pack_innermost_dim_as_hex_string - - -def numpy_to_hls_code( - ndarray, dtype, hls_var_name, pack_innermost_dim=True, no_decl=False -): - """Return C++ code representation of a numpy ndarray with FINN DataType - dtype, using hls_var_name as the resulting C++ variable name. If - pack_innermost_dim is specified, the innermost dimension of the ndarray - will be packed into a hex string using array2hexstring. If no_decl is - set to True, no variable name and type will be generated as part of the - emitted string. - """ - hls_dtype = dtype.get_hls_datatype_str() - if type(ndarray) != np.ndarray or ndarray.dtype != np.float32: - # try to convert to a float numpy array (container dtype is float) - ndarray = np.asarray(ndarray, dtype=np.float32) - if pack_innermost_dim: - idimlen = ndarray.shape[-1] - idimbits = idimlen * dtype.bitwidth() - ndarray = pack_innermost_dim_as_hex_string(ndarray, dtype, idimbits) - hls_dtype = "ap_uint<%d>" % idimbits - ndims = ndarray.ndim - # add type string and variable name - # e.g. 
"const ap_uint<64>" "weightMem0" - ret = "%s %s" % (hls_dtype, hls_var_name) - # add dimensions - for d in range(ndims): - ret += "[%d]" % ndarray.shape[d] - orig_printops = np.get_printoptions() - np.set_printoptions(threshold=sys.maxsize) - - # define a function to convert a single element into a C++ init string - # a single element can be a hex string if we are using packing - def elem2str(x): - if type(x) == str or type(x) == np.str_ or type(x) == np.str: - return '%s("%s", 16)' % (hls_dtype, x) - elif type(x) == np.float32: - if dtype == DataType.FLOAT32: - return str(x) - else: - return str(int(x)) - else: - raise Exception("Unsupported type for numpy_to_hls_code") - - strarr = np.array2string(ndarray, separator=", ", formatter={"all": elem2str}) - np.set_printoptions(**orig_printops) - strarr = strarr.replace("[", "{").replace("]", "}") - if no_decl: - ret = strarr + ";" - else: - ret = ret + " = \n" + strarr + ";" - return ret diff --git a/src/finn/core/__init__.py b/src/finn/core/__init__.py index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..83c8e8bed70797f7d6c0138968f750f72e790386 100644 --- a/src/finn/core/__init__.py +++ b/src/finn/core/__init__.py @@ -0,0 +1,27 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/src/finn/core/datatype.py b/src/finn/core/datatype.py index 4c284b1cbfe472ba4d3d7d2640b692545464e80a..222d11a8872f9be757fd60fbfa5f8abea683311a 100644 --- a/src/finn/core/datatype.py +++ b/src/finn/core/datatype.py @@ -1,28 +1,31 @@ -# Copyright (c) 2018, Xilinx +# Copyright (c) 2020, Xilinx # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# 3. Neither the name of the <organization> nor the -# names of its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY -# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ from enum import Enum, auto @@ -30,8 +33,19 @@ import numpy as np class DataType(Enum): - # important to maintain ordering here: unsigned to signed, fewer to more - # bits. The get_smallest_possible() member function is dependent on this. + """Enum class that contains FINN data types to set the quantization annotation. + ONNX does not support data types smaller than 8-bit integers, whereas in FINN we are + interested in smaller integers down to ternary and bipolar. + + Assignment of DataTypes to indices based on following ordering: + + * unsigned to signed + + * fewer to more bits + + Currently supported DataTypes: """ + + # important: the get_smallest_possible() member function is dependent on ordering. BINARY = auto() UINT2 = auto() UINT3 = auto() @@ -102,7 +116,7 @@ class DataType(Enum): def allowed(self, value): """Check whether given value is allowed for this DataType. - value (float32): value to be checked""" + * value (float32): value to be checked""" if "FLOAT" in self.name: return True @@ -122,9 +136,10 @@ class DataType(Enum): raise Exception("Unrecognized data type: %s" % self.name) def get_num_possible_values(self): - """Return the number of possible values this DataType can take. Only + """Returns the number of possible values this DataType can take. Only implemented for integer types for now.""" - assert self.is_integer() + assert self.is_integer(), """This function only works for integers for now, + not for the DataType you used this function with.""" if "INT" in self.name: return abs(self.min()) + abs(self.max()) + 1 elif self.name == "BINARY" or self.name == "BIPOLAR": @@ -133,7 +148,7 @@ class DataType(Enum): return 3 def get_smallest_possible(value): - """Return smallest (fewest bits) possible DataType that can represent + """Returns smallest (fewest bits) possible DataType that can represent value. 
Prefers unsigned integers where possible.""" if not int(value) == value: return DataType["FLOAT32"] @@ -143,16 +158,16 @@ class DataType(Enum): return dt def signed(self): - """Return whether this DataType can represent negative numbers.""" + """Returns whether this DataType can represent negative numbers.""" return self.min() < 0 def is_integer(self): - """Return whether this DataType represents integer values only.""" + """Returns whether this DataType represents integer values only.""" # only FLOAT32 is noninteger for now return self != DataType.FLOAT32 def get_hls_datatype_str(self): - """Return the corresponding Vivado HLS datatype name.""" + """Returns the corresponding Vivado HLS datatype name.""" if self.is_integer(): if self.signed(): return "ap_int<%d>" % self.bitwidth() diff --git a/src/finn/core/execute_custom_node.py b/src/finn/core/execute_custom_node.py index e8ad421e6bff58048e53a59ab3e0a527d7b67f4f..86f7114a700c31e93d1d980693410c0e16dd128a 100644 --- a/src/finn/core/execute_custom_node.py +++ b/src/finn/core/execute_custom_node.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import finn.custom_op.registry as registry diff --git a/src/finn/core/modelwrapper.py b/src/finn/core/modelwrapper.py index cd2a2ec5952de072824ff680b0d1ecde0221aa38..6813aa06fc86c40fa8c77fee405fd5e66c812c48 100644 --- a/src/finn/core/modelwrapper.py +++ b/src/finn/core/modelwrapper.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import copy import onnx @@ -5,7 +33,8 @@ import onnx.helper as oh import onnx.numpy_helper as np_helper from onnx import TensorProto -import finn.core.utils as util +import finn.util.basic as util +import finn.util.onnx as onnxutil from finn.core.datatype import DataType @@ -32,31 +61,36 @@ class ModelWrapper: @property def graph(self): + """Returns the graph of the model.""" return self._model_proto.graph @graph.setter def graph(self, value): + """Sets the graph of the model according to value""" self._model_proto.graph = value @property def model(self): + """Returns the model.""" return self._model_proto @model.setter def model(self, value): + """Sets the model according to value.""" self._model_proto = value def save(self, filename): - """Save the wrapper ONNX ModelProto into a file with given name.""" + """Saves the wrapper ONNX ModelProto into a file with given name.""" onnx.save(self._model_proto, filename) def analysis(self, analysis_fxn): - """Run given anaylsis_fxn on this model and return resulting dict.""" + """Runs given anaylsis_fxn on this model and return resulting dict.""" return analysis_fxn(self) def transform(self, transformation, make_deepcopy=True): """Applies given 
Transformation repeatedly until no more changes can be made and returns a transformed ModelWrapper instance. + If make_deepcopy is specified, operates on a new (deep)copy of model. """ transformed_model = self @@ -71,8 +105,11 @@ class ModelWrapper: def check_compatibility(self): """Checks this model for FINN compatibility: + * no embedded subgraphs + * all tensor shapes are specified, including activations + * all constants are initializers """ # TODO check for no embedded subgraphs @@ -114,6 +151,19 @@ class ModelWrapper: qa.quant_parameter_tensor_names.append(dt) qnt_annotations.append(qa) + def get_tensor_valueinfo(self, tensor_name): + """Returns ValueInfoProto of tensor with given name, if it has one.""" + graph = self._model_proto.graph + vi_names = [(x.name, x) for x in graph.input] + vi_names += [(x.name, x) for x in graph.output] + vi_names += [(x.name, x) for x in graph.value_info] + try: + vi_ind = [x[0] for x in vi_names].index(tensor_name) + vi = vi_names[vi_ind][1] + return vi + except ValueError: + return None + def get_tensor_shape(self, tensor_name): """Returns the shape of tensor with given name, if it has ValueInfoProto.""" graph = self._model_proto.graph @@ -129,7 +179,7 @@ class ModelWrapper: return None def set_tensor_shape(self, tensor_name, tensor_shape, dtype=TensorProto.FLOAT): - """Assign shape in ValueInfoProto for tensor with given name.""" + """Assigns shape in ValueInfoProto for tensor with given name.""" new_vi = oh.make_tensor_value_info(tensor_name, dtype, tensor_shape) # find what container tis tensor's ValueInfo lives in # if not found anywhere, we assume it's a new value_info @@ -143,7 +193,7 @@ class ModelWrapper: target_container.append(new_vi) def set_initializer(self, tensor_name, tensor_value): - """Set the initializer value for tensor with given name.""" + """Sets the initializer value for tensor with given name.""" graph = self._model_proto.graph # convert tensor_value (numpy array) into TensorProto w/ correct name 
tensor_init_proto = np_helper.from_array(tensor_value) @@ -163,7 +213,7 @@ class ModelWrapper: self.set_tensor_shape(tensor_name, list(tensor_value.shape), dtype) def rename_tensor(self, old_name, new_name): - """Rename a tensor from old_name to new_name.""" + """Renames a tensor from old_name to new_name.""" graph = self.graph # sweep over inputs if util.get_by_name(graph.input, old_name) is not None: @@ -193,7 +243,7 @@ class ModelWrapper: n.output[list(n.output).index(old_name)] = new_name def get_initializer(self, tensor_name): - """Get the initializer value for tensor with given name, if any.""" + """Gets the initializer value for tensor with given name, if any.""" graph = self._model_proto.graph init_names = [x.name for x in graph.initializer] try: @@ -203,7 +253,7 @@ class ModelWrapper: return None def find_producer(self, tensor_name): - """Find and return the node that produces the tensor with given name. + """Finds and returns the node that produces the tensor with given name. Currently only works for linear graphs.""" all_outputs = [x.output[0] for x in self._model_proto.graph.node] try: @@ -213,7 +263,7 @@ class ModelWrapper: return None def find_consumer(self, tensor_name): - """Find and return the node that consumes the tensor with given name. + """Finds and returns the node that consumes the tensor with given name. Currently only works for linear graphs.""" all_inputs = [x.input[0] for x in self._model_proto.graph.node] try: @@ -223,7 +273,7 @@ class ModelWrapper: return None def get_all_tensor_names(self): - """Return a list of all (input, output and value_info) tensor names + """Returns a list of all (input, output and value_info) tensor names in the graph.""" graph = self.graph names = [x.name for x in graph.value_info] @@ -241,6 +291,7 @@ class ModelWrapper: def make_empty_exec_context(self): """Creates an empty execution context for this model. + The execution context is a dictionary of all tensors used for the inference computation. 
Any initializer values will be taken into account, all other tensors will be zero.""" @@ -248,14 +299,14 @@ class ModelWrapper: graph = self._model_proto.graph # make empty tensors for all the graph inputs and outputs for vi in graph.input: - new_tensor = util.valueinfo_to_tensor(vi) + new_tensor = onnxutil.valueinfo_to_tensor(vi) execution_context[vi.name] = new_tensor for vi in graph.output: - new_tensor = util.valueinfo_to_tensor(vi) + new_tensor = onnxutil.valueinfo_to_tensor(vi) execution_context[vi.name] = new_tensor # make empty tensors for all intermediate buffers for vi in graph.value_info: - new_tensor = util.valueinfo_to_tensor(vi) + new_tensor = onnxutil.valueinfo_to_tensor(vi) execution_context[vi.name] = new_tensor # fill in the constants provided by the initializers (TensorProto to npy) for t in graph.initializer: @@ -276,7 +327,7 @@ class ModelWrapper: return ret def get_tensor_fanout(self, tensor_name): - """Return the number of nodes for which the tensor with given name is + """Returns the number of nodes for which the tensor with given name is as input.""" graph = self.graph fanout = 0 @@ -285,40 +336,22 @@ class ModelWrapper: fanout += 1 return fanout - def set_attribute(self, node, attribute_name, value): - """Sets a custom node attribute of given name with given value""" - """Data types of attributes in onnx are encoded: - 2 : integer - 3 : string - so in the beginning a dictionary is introduced with the keys - to this encryption""" - # TO DO: Add additional encryption (i.e. float) - data_type_dict = {} - data_type_dict["string"] = 3 - data_type_dict["int"] = 2 - - attribute = util.get_by_name(node.attribute, attribute_name) - # check if attribute is integer - # For encryption see data_type_dict - if attribute.type == data_type_dict["int"]: - if type(value) is int: - attribute.i = value - else: - raise ValueError( - "Attribute expects integer! 
{} is of type {}!".format( - value, type(value) - ) - ) - elif attribute.type == data_type_dict["string"]: - if type(value) is str: - attribute.s = value.encode("UTF-8") - else: - raise ValueError( - "Attribute expects string! {} is of type {}!".format( - value, type(value) - ) - ) + def get_metadata_prop(self, key): + """Returns the value associated with metadata_prop with given key, + or None otherwise.""" + metadata_prop = util.get_by_name(self.model.metadata_props, key, "key") + if metadata_prop is None: + return None else: - raise Exception("This datatype is not supported, please add to encryption") + return metadata_prop.value - return attribute + def set_metadata_prop(self, key, value): + """Sets metadata property with given key to the given value.""" + metadata_prop = util.get_by_name(self.model.metadata_props, key, "key") + if metadata_prop is None: + metadata_prop = onnx.StringStringEntryProto() + metadata_prop.key = key + metadata_prop.value = value + self.model.metadata_props.append(metadata_prop) + else: + metadata_prop.value = value diff --git a/src/finn/core/onnx_exec.py b/src/finn/core/onnx_exec.py index 9fe5dae587f9d41a0f8fdc8123ad9d704ad310af..0f47a9104e3d2ef3ee06ef908e302344d78e0b17 100644 --- a/src/finn/core/onnx_exec.py +++ b/src/finn/core/onnx_exec.py @@ -1,28 +1,31 @@ -# Copyright (c) 2019, Xilinx +# Copyright (c) 2020, Xilinx # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# 3. 
Neither the name of the <organization> nor the -# names of its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. # -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY -# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import copy @@ -31,58 +34,75 @@ import onnx.helper as helper import onnxruntime as rt import finn.core.execute_custom_node as ex_cu_node +from finn.core.modelwrapper import ModelWrapper +from finn.core.remote_exec import remote_exec +from finn.core.rtlsim_exec import rtlsim_exec +from finn.custom_op.registry import getCustomOp def execute_node(node, context, graph): - """Call onnxruntime to execute a single node. Input/output provided via context.""" - - # run node with custom function or by using onnxruntime - - if node.domain == "finn": + """Executes a single node by using onnxruntime, with custom function or + if dataflow partition by using remote execution or rtlsim. - ex_cu_node.execute_custom_node(node, context, graph) + Input/output provided via context.""" + if node.op_type == "StreamingDataflowPartition": + sdp_node = getCustomOp(node) + model = ModelWrapper(sdp_node.get_nodeattr("model")) + ret = execute_onnx(model, context, True) + context.update(ret) else: + if node.domain == "finn": - # onnxruntime unfortunately does not implement run_node as defined by ONNX, - # it can only execute entire models -- so we create a model which solely - # consists of our current node. 
- node_inputs = list(filter(lambda x: x.name in node.input, graph.input)) - node_inputs += list(filter(lambda x: x.name in node.input, graph.value_info)) - node_outputs = list(filter(lambda x: x.name in node.output, graph.output)) - node_outputs += list(filter(lambda x: x.name in node.output, graph.value_info)) - node_graph = helper.make_graph( - nodes=[node], - name="single-node-exec", - inputs=node_inputs, - outputs=node_outputs, - ) - node_model = helper.make_model(node_graph) - input_dict = dict() - for inp in node.input: - input_dict[inp] = context[inp] + ex_cu_node.execute_custom_node(node, context, graph) - sess = rt.InferenceSession(node_model.SerializeToString()) - output_list = sess.run(None, input_dict) + else: - for output_ind in range(len(node.output)): - outp = node.output[output_ind] - if output_list[output_ind].shape != context[outp].shape: - raise Exception( - """Output shapes disagree after node execution: - found %s vs expected %s""" - % ( - str(output_list[output_ind].shape.shape), - str(context[outp].shape), + # onnxruntime unfortunately does not implement run_node as defined by ONNX, + # it can only execute entire models -- so we create a model which solely + # consists of our current node. 
+ node_inputs = list(filter(lambda x: x.name in node.input, graph.input)) + node_inputs += list( + filter(lambda x: x.name in node.input, graph.value_info) + ) + node_outputs = list(filter(lambda x: x.name in node.output, graph.output)) + node_outputs += list( + filter(lambda x: x.name in node.output, graph.value_info) + ) + node_graph = helper.make_graph( + nodes=[node], + name="single-node-exec", + inputs=node_inputs, + outputs=node_outputs, + ) + node_model = helper.make_model(node_graph) + input_dict = dict() + for inp in node.input: + input_dict[inp] = context[inp] + + sess = rt.InferenceSession(node_model.SerializeToString()) + output_list = sess.run(None, input_dict) + + for output_ind in range(len(node.output)): + outp = node.output[output_ind] + if output_list[output_ind].shape != context[outp].shape: + raise Exception( + """Output shapes disagree after node execution: + found %s vs expected %s""" + % ( + str(output_list[output_ind].shape.shape), + str(context[outp].shape), + ) ) - ) - context[outp] = output_list[output_ind] + context[outp] = output_list[output_ind] def execute_onnx(model, input_dict, return_full_exec_context=False): - """Execute given ONNX ModelWrapper with given named inputs. + """Executes given ONNX ModelWrapper with given named inputs. + If return_full_exec_context is False, a dict of named outputs is returned as indicated by the model.graph.output. 
+ If return return_full_exec_context is True, the full set of tensors used by the execution (including inputs, weights, activations and final outputs) will be returned as a dict.""" @@ -111,13 +131,33 @@ def execute_onnx(model, input_dict, return_full_exec_context=False): str(input_dict[inp_name].shape), ) ) - else: - raise Exception("Provided input not found in graph context: %s" % inp_name) - # now call each node in the graph nodes list - # we can simply walk down the list since the ONNX spec guarantees that it is - # topologically sorted - for node in graph.node: - execute_node(node, execution_context, graph) + # else: + # raise Exception("Provided input not found in graph context: %s" % inp_name) + + # check if model has an execution mode set + # if None, execute model node by node using execute_node() + # if set to "remote_pynq" execute model on PYNQ board + # if set to "rtlsim" execute model using pyverilator + model_exec_mode = model.get_metadata_prop("exec_mode") + if (model_exec_mode is None) or (model_exec_mode == ""): + # execute the model node by node + # we can simply walk down the list since the ONNX spec guarantees that it is + # topologically sorted + for node in graph.node: + execute_node(node, execution_context, graph) + elif model_exec_mode == "remote_pynq": + # use remote exec metadata built into model to execute on a remote PYNQ + remote_exec(model, execution_context) + elif model_exec_mode == "rtlsim": + # use stitched IP for rtlsim + rtlsim_exec(model, execution_context) + else: + raise Exception( + """Metadata property "exec_mode" is set to an unknown value. 
+ Can be left unset or has to be set to "remote_pynq" for remote execution + on PYNQ board or "rtlsim" for execution using pyverilator!""" + ) + if return_full_exec_context: return execution_context else: @@ -130,7 +170,7 @@ def execute_onnx(model, input_dict, return_full_exec_context=False): def execute_onnx_and_make_model(model, input_dict): - """Execute given ONNX ModelWrapper with given named inputs and return a new + """Executes given ONNX ModelWrapper with given named inputs and return a new ModelWrapper where an initializer is provided for each tensor as taken from the execution. This new model is useful for debugging, since it contains all the intermediate activation values.""" @@ -153,7 +193,8 @@ def compare_execution( input_dict, compare_fxn=lambda x, y: np.isclose(x, y, atol=1e-3).all(), ): - """Execute two ONNX models and compare their outputs using given function. + """Executes two ONNX models and compare their outputs using given function. + compare_fxn should take in two tensors and return a Boolean""" # compare values from first output tensors produced res_a = list(execute_onnx(model_a, input_dict).items())[0][1] diff --git a/src/finn/core/remote_exec.py b/src/finn/core/remote_exec.py new file mode 100644 index 0000000000000000000000000000000000000000..190bb857ad6e5448d49a6b742adc888f2bca79d2 --- /dev/null +++ b/src/finn/core/remote_exec.py @@ -0,0 +1,82 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os +import subprocess + +import numpy as np + + +def remote_exec(model, execution_context): + """Executes the given model remotely on the pynq board. The metadata properties + related to the pynq board have to be set. 
The execution context contains the + input values.""" + # TODO fix for multi input-output + pynq_ip = model.get_metadata_prop("pynq_ip") + pynq_username = model.get_metadata_prop("pynq_username") + pynq_password = model.get_metadata_prop("pynq_password") + pynq_target_dir = model.get_metadata_prop("pynq_target_dir") + deployment_dir = model.get_metadata_prop("pynq_deploy_dir") + inp = execution_context[model.graph.input[0].name] + np.save(os.path.join(deployment_dir, "input.npy"), inp) + # extracting last folder of absolute path (deployment_dir) + deployment_folder = os.path.basename(os.path.normpath(deployment_dir)) + # copy input to PYNQ board + cmd = "sshpass -p {} scp -r {}/input.npy {}@{}:{}/{}".format( + pynq_password, + deployment_dir, + pynq_username, + pynq_ip, + pynq_target_dir, + deployment_folder, + ) + bash_command = ["/bin/bash", "-c", cmd] + process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE) + process_compile.communicate() + + cmd = ( + "sshpass -p {} ssh {}@{} " + '"cd {}/{}; echo "xilinx" | sudo -S python3.6 driver.py"' + ).format(pynq_password, pynq_username, pynq_ip, pynq_target_dir, deployment_folder) + bash_command = ["/bin/bash", "-c", cmd] + process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE) + process_compile.communicate() + + cmd = "sshpass -p {} scp {}@{}:{}/{}/output.npy {}".format( + pynq_password, + pynq_username, + pynq_ip, + pynq_target_dir, + deployment_folder, + deployment_dir, + ) + bash_command = ["/bin/bash", "-c", cmd] + process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE) + process_compile.communicate() + outp = np.load("{}/output.npy".format(deployment_dir)) + execution_context[model.graph.output[0].name] = outp diff --git a/src/finn/core/rtlsim_exec.py b/src/finn/core/rtlsim_exec.py new file mode 100644 index 0000000000000000000000000000000000000000..0841fedebcd473a488b2e62db4dc763f283789e1 --- /dev/null +++ b/src/finn/core/rtlsim_exec.py @@ -0,0 +1,166 @@ +# 
Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os + +from finn.custom_op.registry import getCustomOp +from finn.util.data_packing import npy_to_rtlsim_input, rtlsim_output_to_npy +from finn.util.fpgadataflow import ( + pyverilate_get_liveness_threshold_cycles, + pyverilate_stitched_ip, +) + + +def rtlsim_exec(model, execution_context): + """Use PyVerilator to execute given model with stitched IP. 
The execution + context contains the input values.""" + + # ensure stitched ip project already exists + assert os.path.isfile( + model.get_metadata_prop("wrapper_filename") + ), """The + file name from metadata property "wrapper_filename" doesn't exist.""" + assert os.path.isdir( + model.get_metadata_prop("vivado_stitch_proj") + ), """The + directory from metadata property "vivado_stitch_proj" doesn't exist""" + trace_file = model.get_metadata_prop("rtlsim_trace") + # extract input shape + # TODO extend for multiple inputs + i_name = model.graph.input[0].name + i_tensor = execution_context[i_name] + i_dt = model.get_tensor_datatype(i_name) + first_node = getCustomOp(model.find_consumer(i_name)) + i_stream_w = first_node.get_instream_width() + # convert input into time multiplexed shape + i_folded_shape = first_node.get_folded_input_shape() + # TODO any other layout transformations need to happen here! + i_tensor = i_tensor.reshape(i_folded_shape) + # extract output shape + o_name = model.graph.output[0].name + o_shape = model.get_tensor_shape(o_name) + o_dt = model.get_tensor_datatype(o_name) + last_node = getCustomOp(model.find_producer(o_name)) + o_folded_shape = last_node.get_folded_output_shape() + o_stream_w = last_node.get_outstream_width() + packedBits = o_stream_w + targetBits = o_dt.bitwidth() + # pack input + packed_input = npy_to_rtlsim_input(i_tensor, i_dt, i_stream_w) + num_out_values = last_node.get_number_output_values() + # prepare pyverilator model + sim = pyverilate_stitched_ip(model) + _reset_rtlsim(sim) + _toggle_clk(sim) + ret = _run_rtlsim(sim, packed_input, num_out_values, trace_file) + packed_output = ret[0] + model.set_metadata_prop("sim_cycles", str(ret[1])) + # unpack output and put into context + o_folded_tensor = rtlsim_output_to_npy( + packed_output, "out.npy", o_dt, o_folded_shape, packedBits, targetBits + ) + execution_context[o_name] = o_folded_tensor.reshape(o_shape) + + +# TODO move the rtlsim functions below into a common 
location such as utils +def _reset_rtlsim(sim): + """Sets reset input in pyverilator to zero, toggles the clock and set it + back to one""" + sim.io.ap_rst_n_0 = 0 + sim.io.ap_clk_0 = 1 + sim.io.ap_clk_0 = 0 + sim.io.ap_rst_n_0 = 1 + + +def _toggle_clk(sim): + """Toggles the clock input in pyverilator once.""" + sim.io.ap_clk_0 = 1 + sim.io.ap_clk_0 = 0 + + +def _run_rtlsim(sim, inp, num_out_values, trace_file=None): + """Runs the pyverilator simulation by passing the input values to the simulation, + toggle the clock and observing the execution time. Argument num_out_values contains + the number of expected output values, so the simulation is closed after all + outputs are calculated. Function contains also an observation loop that can + abort the simulation if no output value is produced after a certain time + (liveness_threshold from function pyverilate_get_liveness_threshold_cycles() + from finn.util.fpgadataflow)""" + inputs = inp + outputs = [] + sim.io.out_r_0_tready = 1 + + # observe if output is completely calculated + # observation_count will contain the number of cycles the calculation ran + output_observed = False + observation_count = 0 + + # avoid infinite looping of simulation by aborting when there is no change in + # output values after LIVENESS_THRESHOLD cycles + no_change_count = 0 + old_outputs = outputs + liveness_threshold = pyverilate_get_liveness_threshold_cycles() + + if trace_file is not None: + sim.start_vcd_trace(trace_file) + + while not (output_observed): + sim.io.in0_V_V_0_tvalid = 1 if len(inputs) > 0 else 0 + sim.io.in0_V_V_0_tdata = inputs[0] if len(inputs) > 0 else 0 + if sim.io.in0_V_V_0_tready == 1 and sim.io.in0_V_V_0_tvalid == 1: + inputs = inputs[1:] + if sim.io.out_r_0_tvalid == 1 and sim.io.out_r_0_tready == 1: + outputs = outputs + [sim.io.out_r_0_tdata] + sim.io.ap_clk_0 = 1 + sim.io.ap_clk_0 = 0 + + observation_count = observation_count + 1 + no_change_count = no_change_count + 1 + + if len(outputs) == num_out_values: + 
sim_cycles = observation_count + output_observed = True + + if no_change_count == liveness_threshold: + if old_outputs == outputs: + if trace_file is not None: + sim.flush_vcd_trace() + sim.stop_vcd_trace() + raise Exception( + "Error in simulation! Takes too long to produce output." + "Consider setting the LIVENESS_THRESHOLD env.var. to a " + "larger value." + ) + else: + no_change_count = 0 + old_outputs = outputs + if trace_file is not None: + sim.flush_vcd_trace() + sim.stop_vcd_trace() + + return (outputs, sim_cycles) diff --git a/src/finn/custom_op/__init__.py b/src/finn/custom_op/__init__.py index 797916cf20ce9c61cc349df41383b687ed0c101b..39de40f1e2024b1bb5dea0b20f39587151539da5 100644 --- a/src/finn/custom_op/__init__.py +++ b/src/finn/custom_op/__init__.py @@ -1,9 +1,41 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from abc import ABC, abstractmethod -from finn.core.utils import get_by_name +from finn.util.basic import get_by_name import onnx.helper as helper class CustomOp(ABC): + """CustomOp class all custom op nodes are based on. Contains different functions + every custom node should have. Some as abstract methods, these have to be + filled when writing a new custom op node.""" + def __init__(self, onnx_node): super().__init__() self.onnx_node = onnx_node @@ -83,5 +115,5 @@ class CustomOp(ABC): def verify_node(self): """Verifies that all attributes the node needs are there and that particular attributes are set correctly. Also checks if - the number of inputs is equal to the expected number""" + the number of inputs is equal to the expected number.""" pass diff --git a/src/finn/custom_op/fpgadataflow/__init__.py b/src/finn/custom_op/fpgadataflow/__init__.py index f275263a4ed094516250b79adaac4ce87d896384..7f13b43d57d9fe2f6de5e5ed9bb52214611f1098 100644 --- a/src/finn/custom_op/fpgadataflow/__init__.py +++ b/src/finn/custom_op/fpgadataflow/__init__.py @@ -1,56 +1,159 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from abc import abstractmethod import numpy as np import os import subprocess from finn.custom_op import CustomOp -from finn.core.utils import CppBuilder +from finn.util.basic import CppBuilder +from finn.util.fpgadataflow import ( + IPGenBuilder, + pyverilate_get_liveness_threshold_cycles, +) +from . import templates class HLSCustomOp(CustomOp): + """HLSCustomOp class all custom ops that correspond to a finn-hlslib + function are based on. Contains different functions every fpgadataflow + custom node should have. 
Some as abstract methods, these have to be filled + when writing a new fpgadataflow custom op node.""" + def __init__(self, onnx_node): super().__init__(onnx_node) + + self.code_gen_dict = {} + + # getting templates from templates.py + # template for single node execution - self.docompute_template = """ - #include "cnpy.h" - #include "npy2apintstream.hpp" - #include <vector> - #include "bnn-library.h" + self.docompute_template = templates.docompute_template - // includes for network parameters - $GLOBALS$ + # templates for single node ip generation + # cpp file + self.ipgen_template = templates.ipgen_template + # tcl script + self.ipgentcl_template = templates.ipgentcl_template - // defines for network parameters - $DEFINES$ + def get_nodeattr_types(self): + return { + "backend": ("s", True, "fpgadataflow"), + "code_gen_dir_npysim": ("s", False, ""), + "code_gen_dir_ipgen": ("s", False, ""), + "executable_path": ("s", False, ""), + "ipgen_path": ("s", False, ""), + "exec_mode": ("s", False, ""), + "sim_cycles": ("i", False, 0), + "rtlsim_trace": ("s", False, ""), + } - int main(){ + def node_res_estimation(self): + """Returns summarized resource estimation of BRAMs and LUTs + of the node.""" + resources = [] + resources.append("BRAMs: " + str(self.bram_estimation())) + resources.append("LUTs: " + str(self.lut_estimation())) + return resources - $STREAMDECLARATIONS$ + def bram_estimation(self): + """Function for BRAM resource estimation, is member function of + HLSCustomOp class but has to be filled by every node""" + return 0 - $READNPYDATA$ + def lut_estimation(self): + """Function for LUT resource estimation, is member function of + HLSCustomOp class but has to be filled by every node""" + return 0 - $DOCOMPUTE$ + def code_generation_ipgen(self, model, fpgapart, clk): + """Generates c++ code and tcl script for ip generation.""" + node = self.onnx_node - $DATAOUTSTREAM$ + # generate top cpp file for ip generation + path = self.get_nodeattr("code_gen_dir_ipgen") + 
self.generate_params(model, path) + self.global_includes() + self.defines("ipgen") + self.blackboxfunction() + self.pragmas() + self.docompute() - $SAVEASCNPY$ + template = self.ipgen_template - } + for key in self.code_gen_dict: + # transform list into long string separated by '\n' + code_gen_line = "\n".join(self.code_gen_dict[key]) + template = template.replace(key, code_gen_line) + code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen") + f = open(os.path.join(code_gen_dir, "top_{}.cpp".format(node.name)), "w") + f.write(template) + f.close() + self.code_gen_dict.clear() - """ - self.code_gen_dict = {} + # generate tcl script for ip generation + self.code_gen_dict["$PROJECTNAME$"] = ["project_{}".format(node.name)] + self.code_gen_dict["$HWSRCDIR$"] = [code_gen_dir] + self.code_gen_dict["$FPGAPART$"] = [fpgapart] + self.code_gen_dict["$FINNHLSLIBDIR$"] = ["/workspace/finn-hlslib"] + self.code_gen_dict["$TOPFXN$"] = [node.name] + self.code_gen_dict["$CLKPERIOD$"] = [str(clk)] - def get_nodeattr_types(self): - return { - "backend": ("s", True, "fpgadataflow"), - "code_gen_dir": ("s", False, ""), - "executable_path": ("s", False, ""), - } + template = self.ipgentcl_template + + for key in self.code_gen_dict: + # transform list into long string separated by '\n' + code_gen_line = "\n".join(self.code_gen_dict[key]) + template = template.replace(key, code_gen_line) + code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen") + f = open(os.path.join(code_gen_dir, "hls_syn_{}.tcl".format(node.name)), "w") + f.write(template) + f.close() + self.code_gen_dict.clear() - def code_generation(self, model): + def ipgen_singlenode_code(self): + """Builds the bash script for ip generation using the IPGenBuilder from + finn.util.fpgadataflow.""" node = self.onnx_node - self.generate_params(model) + code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen") + builder = IPGenBuilder() + builder.append_tcl(code_gen_dir + "/hls_syn_{}.tcl".format(node.name)) + 
builder.set_ipgen_path(code_gen_dir + "/project_{}".format(node.name)) + builder.build(code_gen_dir) + self.set_nodeattr("ipgen_path", builder.ipgen_path) + + def code_generation_npysim(self, model): + """Generates c++ code for simulation (npysim).""" + node = self.onnx_node + path = self.get_nodeattr("code_gen_dir_npysim") + self.generate_params(model, path) self.global_includes() - self.defines() + self.defines("npysim") self.read_npy_data() self.strm_decl() self.docompute() @@ -63,20 +166,23 @@ class HLSCustomOp(CustomOp): # transform list into long string separated by '\n' code_gen_line = "\n".join(self.code_gen_dict[key]) template = template.replace(key, code_gen_line) - code_gen_dir = self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") f = open(os.path.join(code_gen_dir, "execute_{}.cpp".format(node.op_type)), "w") f.write(template) f.close() + self.code_gen_dict.clear() def compile_singlenode_code(self): - code_gen_dir = self.get_nodeattr("code_gen_dir") + """Builds the bash script for compilation using the CppBuilder from + finn.util.basic and executes the script to produce the executable.""" + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") builder = CppBuilder() # to enable additional debug features please uncommand the next line # builder.append_includes("-DDEBUG") builder.append_includes("-I/workspace/finn/src/finn/data/cpp") builder.append_includes("-I/workspace/cnpy/") builder.append_includes("-I/workspace/finn-hlslib") - builder.append_includes("-I/workspace/vivado-hlslib") + builder.append_includes("-I{}/include".format(os.environ["VIVADO_PATH"])) builder.append_includes("--std=c++11") builder.append_sources(code_gen_dir + "/*.cpp") builder.append_sources("/workspace/cnpy/cnpy.cpp") @@ -86,12 +192,15 @@ class HLSCustomOp(CustomOp): self.set_nodeattr("executable_path", builder.executable_path) def dynamic_input_to_npy(self, context, count): + """Saves input (given context) into .npy files. 
+ + Count indicates the number of inputs that have to be saved.""" node = self.onnx_node - code_gen_dir = self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") if code_gen_dir == "": raise Exception( """ -Found no codegen dir for this node, did you run the codegen transformation? +Found no codegen dir for this node, did you run the codegen_npysim transformation? """ ) # create a npy file for each input of the node (in_ind is input index) @@ -104,14 +213,16 @@ Found no codegen dir for this node, did you run the codegen transformation? ) def npy_to_dynamic_output(self, context): + """Reads the output from a .npy file and saves it at the right place in + the context dictionary.""" # TODO support multi-output nodes as needed node = self.onnx_node - code_gen_dir = self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") output = np.load("{}/output.npy".format(code_gen_dir)) context[node.output[0]] = output def exec_precompiled_singlenode_model(self): - # execute precompiled executable + """Executes precompiled executable.""" executable_path = self.get_nodeattr("executable_path") if executable_path == "": raise Exception( @@ -123,41 +234,191 @@ compilation transformations? process_execute = subprocess.Popen(executable_path, stdout=subprocess.PIPE) process_execute.communicate() + def reset_rtlsim(self, sim): + """Sets reset input in pyverilator to zero, toggles the clock and set it + back to one""" + sim.io.ap_rst_n = 0 + sim.io.ap_clk = 1 + sim.io.ap_clk = 0 + sim.io.ap_rst_n = 1 + + def toggle_clk(self, sim): + """Toggles the clock input in pyverilator once.""" + sim.io.ap_clk = 1 + sim.io.ap_clk = 0 + + def rtlsim(self, sim, inp): + """Runs the pyverilator simulation by passing the input values to the simulation, + toggle the clock and observing the execution time. 
Function contains also an + observation loop that can abort the simulation if no output value is produced + after 100 cycles.""" + + trace_file = self.get_nodeattr("rtlsim_trace") + if trace_file != "": + if trace_file == "default": + trace_file = self.onnx_node.name + ".vcd" + sim.start_vcd_trace(trace_file) + inputs = inp + outputs = [] + sim.io.out_V_V_TREADY = 1 + + # observe if output is completely calculated + # observation_count will contain the number of cycles the calculation ran + num_out_values = self.get_number_output_values() + output_observed = False + observation_count = 0 + + # avoid infinite looping of simulation by aborting when there is no change in + # output values after 100 cycles + no_change_count = 0 + old_outputs = outputs + liveness_threshold = pyverilate_get_liveness_threshold_cycles() + + while not (output_observed): + sim.io.in0_V_V_TVALID = 1 if len(inputs) > 0 else 0 + sim.io.in0_V_V_TDATA = inputs[0] if len(inputs) > 0 else 0 + if sim.io.in0_V_V_TREADY == 1 and sim.io.in0_V_V_TVALID == 1: + inputs = inputs[1:] + if sim.io.out_V_V_TVALID == 1 and sim.io.out_V_V_TREADY == 1: + outputs = outputs + [sim.io.out_V_V_TDATA] + sim.io.ap_clk = 1 + sim.io.ap_clk = 0 + + observation_count = observation_count + 1 + no_change_count = no_change_count + 1 + + if len(outputs) == num_out_values: + self.set_nodeattr("sim_cycles", observation_count) + output_observed = True + + if no_change_count == liveness_threshold: + if old_outputs == outputs: + if trace_file != "": + sim.flush_vcd_trace() + sim.stop_vcd_trace() + raise Exception( + "Error in simulation! Takes too long to produce output. " + "Consider setting the LIVENESS_THRESHOLD env.var. to a " + "larger value." 
+ ) + else: + no_change_count = 0 + old_outputs = outputs + if trace_file != "": + sim.flush_vcd_trace() + sim.stop_vcd_trace() + return outputs + def execute_node(self, context, graph): - # save input(s) - self.dynamic_input_to_npy(context, 1) - # execute the precompiled model - self.exec_precompiled_singlenode_model() - # load output npy file - self.npy_to_dynamic_output(context) - - def generate_params(self, model): + """Executes single node using npysim or rtlsim.""" + mode = self.get_nodeattr("exec_mode") + if mode == "npysim": + # save input(s) + self.dynamic_input_to_npy(context, 1) + # execute the precompiled model + self.exec_precompiled_singlenode_model() + # load output npy file + self.npy_to_dynamic_output(context) + elif mode == "rtlsim": + pass + + else: + raise Exception( + """Invalid value for attribute exec_mode! Is currently set to: {} + has to be set to one of the following value ("npysim", "rtlsim")""".format( + mode + ) + ) + + def generate_params(self, model, path): + """Function to generate parameters (i.e. weights and thresholds), + is member function of HLSCustomOp class but has to be filled + by every node.""" + pass + + @abstractmethod + def get_number_output_values(self): + """Function to get the number of expected output values, + is member function of HLSCustomOp class but has to be filled + by every node.""" pass @abstractmethod def global_includes(self): + """Function to set the global includes for c++ code that has to be generated + for npysim or rtlsim, is member function of HLSCustomOp class but has to + be filled by every node.""" pass @abstractmethod - def defines(self): + def defines(self, var): + """Function to set the define commands for c++ code that has to be generated + for npysim or rtlsim, is member function of HLSCustomOp class but has to + be filled by every node. + + var: makes it possible to reuse the function for different c++ code generation. + I.e. 
if set to "ipgen" in StreamingFCLayer_Batch additional PRAGMA defines are + added.""" pass @abstractmethod def read_npy_data(self): + """Function to generate the commands for reading data from .npy file in c++, + is member function of HLSCustomOp class but has to be filled by every node.""" pass @abstractmethod def strm_decl(self): + """Function to generate the commands for the stream declaration in c++, + is member function of HLSCustomOp class but has to be filled + by every node.""" pass @abstractmethod def docompute(self): + """Function to generate the commands for the computational part of the + c++ code, is member function of HLSCustomOp class but has to be filled + by every node.""" pass @abstractmethod def dataoutstrm(self): + """Function to generate the commands for reading out data from c++ and convert + into npy format, is member function of HLSCustomOp class but has to be filled + by every node.""" pass @abstractmethod def save_as_npy(self): + """Function to generate the commands for saving data in .npy file in c++, + is member function of HLSCustomOp class but has to be filled by every node.""" + pass + + @abstractmethod + def blackboxfunction(self): + """Function to generate a blackbock function in c++ from which an IP block + will be generated, is member function of HLSCustomOp class but has to be filled + by every node.""" + pass + + @abstractmethod + def pragmas(self): + """Function to generate the pragma commands in c++, is member function of + HLSCustomOp class but has to be filled by every node.""" pass + + def get_folded_input_shape(self): + """Returns folded input shape (according to synapse folding), if implemented.""" + raise Exception("get_folded_input_shape not implemented for this op") + + def get_folded_output_shape(self): + """Returns folded output shape (according to neuron folding), if implemented.""" + raise Exception("get_folded_output_shape not implemented for this op") + + def get_instream_width(self): + """Returns input stream 
width, if implemented.""" + raise Exception("get_instream_width not implemented for this op") + + def get_outstream_width(self): + """Returns output stream width, if implemented.""" + raise Exception("get_outstream_width not implemented for this op") diff --git a/src/finn/custom_op/fpgadataflow/convolutioninputgenerator.py b/src/finn/custom_op/fpgadataflow/convolutioninputgenerator.py index 560aa3268339aab5e3d707d766910bf709a21eaf..14016ce9ce22c729ad3279fb90dc900f88fda8ba 100644 --- a/src/finn/custom_op/fpgadataflow/convolutioninputgenerator.py +++ b/src/finn/custom_op/fpgadataflow/convolutioninputgenerator.py @@ -1,12 +1,44 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import os import numpy as np +from pyverilator import PyVerilator from finn.core.datatype import DataType from finn.custom_op.fpgadataflow import HLSCustomOp class ConvolutionInputGenerator(HLSCustomOp): + """Class that corresponds to finn-hlslib ConvolutionInputGenerator + (sliding window) function.""" + def __init__(self, onnx_node): super().__init__(onnx_node) @@ -35,54 +67,148 @@ class ConvolutionInputGenerator(HLSCustomOp): def verify_node(self): pass + def bram_estimation(self): + pass + + def lut_estimation(self): + pass + def get_input_datatype(self): + """Returns FINN DataType of input.""" return DataType[self.get_nodeattr("inputDataType")] def get_output_datatype(self): + """Returns FINN DataType of output.""" return DataType[self.get_nodeattr("outputDataType")] def get_stream_width(self): + """Returns stream width, input and output stream width are equal for + the sliding window function""" return self.get_nodeattr("SIMD") * self.get_nodeattr("Input_precision") + def get_number_output_values(self): + k = self.get_nodeattr("ConvKernelDim") + ifm_ch = self.get_nodeattr("IFMChannels") + ofm_dim = self.get_nodeattr("OFMDim") + out_pix = ofm_dim * ofm_dim + + return out_pix * k * k * ifm_ch + def execute_node(self, context, graph): + mode = self.get_nodeattr("exec_mode") node = self.onnx_node k = self.get_nodeattr("ConvKernelDim") ifm_dim = self.get_nodeattr("IFMDim") ifm_ch = self.get_nodeattr("IFMChannels") ofm_dim = 
self.get_nodeattr("OFMDim") out_pix = ofm_dim * ofm_dim - idt = self.get_input_datatype() - if idt == DataType.BIPOLAR: - # use binary for bipolar storage - idt = DataType.BINARY - - # TODO ensure codegen dir exists - code_gen_dir = self.get_nodeattr("code_gen_dir") - # create a npy file for input of the node - - inp = context[node.input[0]] - assert str(inp.dtype) == "float32" - assert inp.shape == (1, ifm_ch, ifm_dim, ifm_dim) - reshaped_inp = inp.transpose(0, 2, 3, 1) - np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_inp) - # execute the precompiled model - super().exec_precompiled_singlenode_model() - # load output npy file - super().npy_to_dynamic_output(context) - if self.get_output_datatype() == DataType.BIPOLAR: - out = context[node.output[0]] - out = 2 * out - 1 - context[node.output[0]] = out - assert context[node.output[0]].shape == (1, out_pix, k * k, ifm_ch) - # reshape output to have expected shape - context[node.output[0]] = context[node.output[0]].reshape( - 1, out_pix, k * k * ifm_ch - ) + + if mode == "npysim": + idt = self.get_input_datatype() + if idt == DataType.BIPOLAR: + # use binary for bipolar storage + idt = DataType.BINARY + + # TODO ensure codegen dir exists + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") + # create a npy file for input of the node + + inp = context[node.input[0]] + assert str(inp.dtype) == "float32", "Input datatype is not float32" + assert inp.shape == ( + 1, + ifm_ch, + ifm_dim, + ifm_dim, + ), """Input shape doesn't + match expected shape (1, ifm_ch, ifm_dim, ifm_dim).""" + reshaped_inp = inp.transpose(0, 2, 3, 1) + np.save(os.path.join(code_gen_dir, "input_0.npy"), reshaped_inp) + # execute the precompiled model + super().exec_precompiled_singlenode_model() + # load output npy file + super().npy_to_dynamic_output(context) + if self.get_output_datatype() == DataType.BIPOLAR: + out = context[node.output[0]] + out = 2 * out - 1 + context[node.output[0]] = out + assert context[node.output[0]].shape 
== ( + 1, + out_pix, + k * k, + ifm_ch, + ), """Output + shape doesn't match expected shape (1, out_pix, k*k, ifm_ch).""" + # reshape output to have expected shape + context[node.output[0]] = context[node.output[0]].reshape( + 1, out_pix, k * k * ifm_ch + ) + elif mode == "rtlsim": + code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen") + prefixed_top_name = "%s_%s" % (node.name, node.name) + # check if needed file exists + verilog_file = "{}/project_{}/sol1/impl/verilog/{}.v".format( + code_gen_dir, node.name, prefixed_top_name + ) + if os.path.isfile(verilog_file): + inp = context[node.input[0]] + inp = inp.transpose(0, 2, 3, 1) + inp = inp.flatten() + + # TODO: check how to sort inputs for multichannel inputs + # a = [] + # for i in range(len(inp)): + # if (i+1) % 2 == 0: + # a.append((int(inp[i-1]) << 1) + int(inp[i])) + # inp = a + sim = PyVerilator.build( + verilog_file, + verilog_path=[ + "{}/project_{}/sol1/impl/verilog/".format( + code_gen_dir, node.name + ) + ], + ) + super().reset_rtlsim(sim) + super().toggle_clk(sim) + output = self.rtlsim(sim, inp) + output = [int(x) for x in output] + odt = self.get_output_datatype() + if odt == DataType.BIPOLAR: + output = [2 * x - 1 for x in output] + + # pyverilator interprets int2 as uint2, so output has to be corrected + elif odt == DataType.INT2: + mask = 2 ** (odt.bitwidth() - 1) + output = [-(x & mask) + (x & ~mask) for x in output] + # TODO: check how to sort inputs for multichannel inputs + # output = [bin(x)[2:].zfill(ifm_ch) for x in output] + # output_ch1 = [int(x[:1]) for x in output] + # output_ch2 = [int(x[1:]) for x in output] + + # reshape output + output = np.asarray([output], dtype=np.float32).reshape( + 1, out_pix, k * k * ifm_ch + ) + context[node.output[0]] = output + + else: + raise Exception( + """Found no verilog files for this node, + did you run the codegen_ipgen transformation?""" + ) + else: + raise Exception( + """Invalid value for attribute exec_mode! 
Is currently set to: {} + has to be set to one of the following value ("npysim", "rtlsim")""".format( + mode + ) + ) def global_includes(self): self.code_gen_dict["$GLOBALS$"] = ['#include "slidingwindow.h"'] - def defines(self): + def defines(self, var): numReps = 1 self.code_gen_dict["$DEFINES$"] = [ """#define ConvKernelDim1 {}\n #define IFMChannels1 {} @@ -100,7 +226,7 @@ class ConvolutionInputGenerator(HLSCustomOp): ] def read_npy_data(self): - code_gen_dir = self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") dtype = self.get_input_datatype() if dtype == DataType.BIPOLAR: # use binary for bipolar storage @@ -131,12 +257,12 @@ class ConvolutionInputGenerator(HLSCustomOp): self.code_gen_dict["$DOCOMPUTE$"] = [ """{}<ConvKernelDim1, IFMChannels1, Input_precision1, IFMDim1, OFMDim1, SIMD1, Stride1> (in0, out, numReps);""".format( - node.op_type, + node.op_type ) ] def dataoutstrm(self): - code_gen_dir = self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") dtype = self.get_output_datatype() if dtype == DataType.BIPOLAR: # use binary for bipolar storage @@ -168,3 +294,18 @@ class ConvolutionInputGenerator(HLSCustomOp): def save_as_npy(self): self.code_gen_dict["$SAVEASCNPY$"] = [] + + def blackboxfunction(self): + self.code_gen_dict["$BLACKBOXFUNCTION$"] = [ + """void {}(hls::stream<ap_uint<SIMD1*Input_precision1>> &in0, + hls::stream<ap_uint<SIMD1*Input_precision1>> &out)""".format( + self.onnx_node.name + ) + ] + + def pragmas(self): + self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"] + self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out") + self.code_gen_dict["$PRAGMAS$"].append( + "#pragma HLS INTERFACE ap_ctrl_none port=return" + ) diff --git a/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py b/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py index 
975da666f0ba728f542b06865aaa2c66c5f07c07..a2c88c5d6eca723f4e853e09a685bc6478e9a01c 100644 --- a/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py +++ b/src/finn/custom_op/fpgadataflow/streamingfclayer_batch.py @@ -1,22 +1,61 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import math import os import numpy as np +from pyverilator import PyVerilator -from finn.backend.fpgadataflow.utils import numpy_to_hls_code from finn.core.datatype import DataType -from finn.core.utils import interleave_matrix_outer_dim_from_partitions from finn.custom_op.fpgadataflow import HLSCustomOp +from finn.util.basic import interleave_matrix_outer_dim_from_partitions +from finn.util.data_packing import ( + npy_to_rtlsim_input, + numpy_to_hls_code, + rtlsim_output_to_npy, +) + +# ONNX i/o tensor shape assumptions for StreamingFCLayer: +# input 0 is the input vector, shape (1, i_size) = (1, MW) +# input 1 is the weight vector, shape (i_size, o_size) = (MW, MH) +# (optional) input 2 is the threshold vector, shape (o_size, n_thres) +# output 0 is the output vector, shape (1, o_size) = (1, MH) class StreamingFCLayer_Batch(HLSCustomOp): + """Class that corresponds to finn-hls StreamingFCLayer_Batch function.""" + def __init__(self, onnx_node): super().__init__(onnx_node) def get_nodeattr_types(self): my_attrs = { - # "backend": ("s", True, "fpgadataflow"), - # "code_gen_dir": ("s", True, ""), - # "executable_path": ("s", True, ""), "PE": ("i", True, 0), "SIMD": ("i", True, 0), "MW": ("i", True, 0), @@ -32,21 +71,26 @@ class StreamingFCLayer_Batch(HLSCustomOp): "binaryXnorMode": ("i", False, 0), # no-activation mode (produce accumulators) "noActivation": ("i", False, 0), + # input and output FIFO depths + "inFIFODepth": ("i", False, 0), + "outFIFODepth": ("i", False, 0), } my_attrs.update(super().get_nodeattr_types()) return my_attrs def calc_wmem(self): + """Calculates and returns WMEM.""" mw = self.get_nodeattr("MW") mh = self.get_nodeattr("MH") pe = self.get_nodeattr("PE") simd = self.get_nodeattr("SIMD") - assert mh % pe == 0 - assert mw % simd == 0 + assert mh % pe == 0, "Requirement MH divisable by PE is violated." + assert mw % simd == 0, "Requirement MW divisable by SIMD is violated." 
wmem = mw * mh // (pe * simd) return wmem def calc_tmem(self): + """Calculates and returns TMEM.""" if self.get_nodeattr("noActivation") == 1: return 0 else: @@ -90,8 +134,9 @@ class StreamingFCLayer_Batch(HLSCustomOp): info_messages.append('Attribute backend should be set to "fpgadataflow"') # verify that all necessary attributes exist + # TODO collect automatically from get_nodeattr_types try: - self.get_nodeattr("code_gen_dir") + self.get_nodeattr("code_gen_dir_npysim") self.get_nodeattr("executable_path") self.get_nodeattr("resType") self.get_nodeattr("MW") @@ -109,7 +154,7 @@ class StreamingFCLayer_Batch(HLSCustomOp): info_messages.append( """The necessary attributes do not exist. StreamingFCLayer_Batch needs the following attributes: - code_gen_dir, executable_path, resType, MW, MH, SIMD, PE, + code_gen_dir_npysim, executable_path, resType, MW, MH, SIMD, PE, inputDataType, weightDataType, outputDataType, ActVal, binaryXnorMode, noActivation""" ) @@ -144,13 +189,56 @@ class StreamingFCLayer_Batch(HLSCustomOp): return info_messages + def bram_estimation(self): + """Calculates resource estimation for BRAM based on: + - FINN-R: An End-to-End Deep-Learning Framework for Fast + Exploration of Quantized Neural Networks + - M. Blott, T. B. Preusser, N. J. Fraser, G. Gambardella, K. O'Brien, + Y. Umuroglu, M. Leeser and K. Vissers + - 12. Sep 2018 + """ + # TODO add in/out FIFO contributions + P = self.get_nodeattr("PE") + Q = self.get_nodeattr("SIMD") + wdt = self.get_weight_datatype() + W = wdt.bitwidth() + D_in = self.get_instream_width() + D_out = self.get_outstream_width() + omega = (D_in * D_out) / (Q * P) + return P * (math.ceil(omega / 512)) * (math.ceil((Q * W) / 36)) + + def lut_estimation(self): + """Calculates resource estimations for LUTs based on: + - FINN-R: An End-to-End Deep-Learning Framework for Fast + Exploration of Quantized Neural Networks + - M. Blott, T. B. Preusser, N. J. Fraser, G. Gambardella, K. O'Brien, + Y. Umuroglu, M. Leeser and K. 
Vissers + - 12. Sep 2018 + """ + # TODO add in/out FIFO contributions + P = self.get_nodeattr("PE") + Q = self.get_nodeattr("SIMD") + wdt = self.get_weight_datatype() + W = wdt.bitwidth() + # determine tdt with input and weight data types + idt = self.get_input_datatype() + A = idt.bitwidth() + # parameters from experiments in paper mentioned above + c0 = 300 + c1 = 1.1 + + return c0 + c1 * (P * Q) * (W * A) + def get_input_datatype(self): + """Returns FINN DataType of input.""" return DataType[self.get_nodeattr("inputDataType")] def get_weight_datatype(self): + """Returns FINN DataType of weights.""" return DataType[self.get_nodeattr("weightDataType")] def get_output_datatype(self): + """Returns FINN DataType of output.""" return DataType[self.get_nodeattr("outputDataType")] def get_instream_width(self): @@ -161,7 +249,25 @@ class StreamingFCLayer_Batch(HLSCustomOp): o_bits = self.get_output_datatype().bitwidth() return o_bits * self.get_nodeattr("PE") + def get_folded_input_shape(self): + mw = self.get_nodeattr("MW") + simd = self.get_nodeattr("SIMD") + sf = mw // simd + return (1, sf, simd) + + def get_folded_output_shape(self): + mh = self.get_nodeattr("MH") + pe = self.get_nodeattr("PE") + nf = mh // pe + return (1, nf, pe) + + def get_number_output_values(self): + nf = self.get_folded_output_shape()[1] + return nf + def get_template_param_values(self): + """Returns the template parameter values according to input, output and weight + data types.""" ret = dict() inp_hls_str = self.get_input_datatype().get_hls_datatype_str() out_hls_str = self.get_output_datatype().get_hls_datatype_str() @@ -212,9 +318,13 @@ class StreamingFCLayer_Batch(HLSCustomOp): pe = self.get_nodeattr("PE") simd = self.get_nodeattr("SIMD") wmem = self.calc_wmem() - assert orig_weight_matrix.shape == (mw, mh) - assert mw % simd == 0 - assert mh % pe == 0 + assert orig_weight_matrix.shape == ( + mw, + mh, + ), """Weights matrix doesn't + have expected shape (mw, mh)""" + assert mw % simd == 
0, "Requirement MH divisable by SIMD is violated." + assert mh % pe == 0, "Requirement MH divisable by PE is violated." # start by transposing the original weight matrix, since ONNX and # finn-hlslib use different assumptions # ONNX uses (in_features, out_features) and matmul(x, W) @@ -228,6 +338,8 @@ class StreamingFCLayer_Batch(HLSCustomOp): ret = interleave_matrix_outer_dim_from_partitions(ret, pe) # create SIMD as innermost dimension and add a dummy outer dim ret = ret.reshape(1, pe, wmem, simd) + # reverse the SIMD dimension + ret = np.flip(ret, axis=-1) return ret def get_hls_compatible_threshold_tensor(self, orig_thres_matrix): @@ -241,8 +353,11 @@ class StreamingFCLayer_Batch(HLSCustomOp): mh = self.get_nodeattr("MH") pe = self.get_nodeattr("PE") tmem = mh // pe - assert mh % pe == 0 - assert orig_thres_matrix.ndim == 2 + assert mh % pe == 0, "Requirement MH divisable by PE is violated." + assert ( + orig_thres_matrix.ndim == 2 + ), """Threshold matrix dimension is + not as expected (2).""" n_thres_steps = orig_thres_matrix.shape[1] inp_is_bipolar = self.get_input_datatype() == DataType.BIPOLAR wt_is_bipolar = self.get_weight_datatype() == DataType.BIPOLAR @@ -261,15 +376,28 @@ class StreamingFCLayer_Batch(HLSCustomOp): # ensure channels = mh , duplicating if necessary if ret.shape[0] == 1: ret = np.tile(ret, (mh, 1)) - assert ret.shape[0] == mh + assert ( + ret.shape[0] == mh + ), "Channels of threshold matrix are not as expected (mh)" # distribute rows between PEs ret = interleave_matrix_outer_dim_from_partitions(ret, pe) - assert ret.shape[0] == pe - assert ret.shape[1] == tmem - assert ret.shape[2] == n_thres_steps + assert ( + ret.shape[0] == pe + ), """First dimension after distribution of the + rows between PEs is not as expected (pe)""" + assert ( + ret.shape[1] == tmem + ), """Second dimension after distribution of the + rows between PEs is not as expected (tmem)""" + assert ( + ret.shape[2] == n_thres_steps + ), """Third dimension after 
distribution of the + rows between PEs is not as expected (n_thres_steps)""" return ret.reshape(1, pe, tmem, n_thres_steps) - def generate_params(self, model): + def generate_params(self, model, path): + """Saves weights into params.h and if existing thresholds into thresh.h.""" + code_gen_dir = path # weights weights = model.get_initializer(self.onnx_node.input[1]) # convert weights into hlslib-compatible format @@ -283,7 +411,7 @@ class StreamingFCLayer_Batch(HLSCustomOp): weight_tensor, export_wdt, "weights", True, True ) # write weights into params.h - code_gen_dir = self.get_nodeattr("code_gen_dir") + # code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") f_weights = open("{}/params.h".format(code_gen_dir), "w") if export_wdt.bitwidth() != 1: @@ -303,6 +431,7 @@ class StreamingFCLayer_Batch(HLSCustomOp): ) f_weights.write(weight_hls_code) f_weights.close() + # thresholds if len(self.onnx_node.input) > 2: thresholds = model.get_initializer(self.onnx_node.input[2]) @@ -324,7 +453,7 @@ class StreamingFCLayer_Batch(HLSCustomOp): threshold_tensor, tdt, "thresholds", False, True ) # write thresholds into thresh.h - code_gen_dir = self.get_nodeattr("code_gen_dir") + # code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") f_thresh = open("{}/thresh.h".format(code_gen_dir), "w") tdt_hls = tdt.get_hls_datatype_str() # use binary to export bipolar activations @@ -348,6 +477,7 @@ class StreamingFCLayer_Batch(HLSCustomOp): f_thresh.close() def execute_node(self, context, graph): + mode = self.get_nodeattr("exec_mode") node = self.onnx_node mw = self.get_nodeattr("MW") mh = self.get_nodeattr("MH") @@ -357,7 +487,18 @@ class StreamingFCLayer_Batch(HLSCustomOp): nf = mh // pe # TODO ensure codegen dir exists - code_gen_dir = self.get_nodeattr("code_gen_dir") + if mode == "npysim": + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") + elif mode == "rtlsim": + code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen") + else: + raise Exception( + """Invalid value for 
attribute exec_mode! Is currently set to: {} + has to be set to one of the following value ("npysim", "rtlsim")""".format( + mode + ) + ) + # create a npy file fore each input of the node (in_ind is input index) in_ind = 0 for inputs in node.input: @@ -365,15 +506,18 @@ class StreamingFCLayer_Batch(HLSCustomOp): # the second input are the weights # the third input are the thresholds if in_ind == 0: - assert str(context[inputs].dtype) == "float32" + assert ( + str(context[inputs].dtype) == "float32" + ), """Input datatype is + not float32 as expected.""" expected_inp_shape = (1, sf, simd) reshaped_input = context[inputs].reshape(expected_inp_shape) - # flip SIMD (innermost) dimension of input tensor, there's some reversal - # going on somewhere with a mistmatch between npy and hls... - reshaped_input = np.flip(reshaped_input, -1) if self.get_input_datatype() == DataType.BIPOLAR: # store bipolar activations as binary reshaped_input = (reshaped_input + 1) / 2 + export_idt = DataType.BINARY + else: + export_idt = self.get_input_datatype() np.save( os.path.join(code_gen_dir, "input_{}.npy".format(in_ind)), reshaped_input, @@ -381,18 +525,73 @@ class StreamingFCLayer_Batch(HLSCustomOp): elif in_ind > 2: raise Exception("Unexpected input found for StreamingFCLayer") in_ind += 1 - # execute the precompiled model - super().exec_precompiled_singlenode_model() - # load output npy file - super().npy_to_dynamic_output(context) - # reinterpret binary output as bipolar where needed - if self.get_output_datatype() == DataType.BIPOLAR: - out = context[node.output[0]] - out = 2 * out - 1 - context[node.output[0]] = out - assert context[node.output[0]].shape == (1, nf, pe) - # reshape output to have expected shape - context[node.output[0]] = context[node.output[0]].reshape(1, mh) + + if mode == "npysim": + # execute the precompiled model + super().exec_precompiled_singlenode_model() + # load output npy file + super().npy_to_dynamic_output(context) + # reinterpret binary output as 
bipolar where needed + if self.get_output_datatype() == DataType.BIPOLAR: + out = context[node.output[0]] + out = 2 * out - 1 + context[node.output[0]] = out + assert context[node.output[0]].shape == ( + 1, + nf, + pe, + ), """Output shape is not + as expected (1, nf, pe)""" + # reshape output to have expected shape + context[node.output[0]] = context[node.output[0]].reshape(1, mh) + elif mode == "rtlsim": + prefixed_top_name = "%s_%s" % (node.name, node.name) + # check if needed file exists + verilog_file = "{}/project_{}/sol1/impl/verilog/{}.v".format( + code_gen_dir, node.name, prefixed_top_name + ) + if os.path.isfile(verilog_file): + nbits = self.get_instream_width() + inp = npy_to_rtlsim_input( + "{}/input_0.npy".format(code_gen_dir), export_idt, nbits + ) + sim = PyVerilator.build( + verilog_file, + verilog_path=[ + "{}/project_{}/sol1/impl/verilog/".format( + code_gen_dir, node.name + ) + ], + ) + super().reset_rtlsim(sim) + super().toggle_clk(sim) + output = self.rtlsim(sim, inp) + odt = self.get_output_datatype() + target_bits = odt.bitwidth() + packed_bits = self.get_outstream_width() + out_npy_path = "{}/output.npy".format(code_gen_dir) + rtlsim_output_to_npy( + output, out_npy_path, odt, (1, nf, pe), packed_bits, target_bits + ) + + # load and reshape output + output = np.load(out_npy_path) + output = np.asarray([output], dtype=np.float32).reshape(1, mh) + context[node.output[0]] = output + + else: + raise Exception( + """Found no verilog files for this node, + did you run the codegen_ipgen transformation?""" + ) + + else: + raise Exception( + """Invalid value for attribute exec_mode! 
Is currently set to: {} + has to be set to one of the following value ("npysim", "rtlsim")""".format( + mode + ) + ) def global_includes(self): self.code_gen_dict["$GLOBALS$"] = ['#include "weights.hpp"'] @@ -402,7 +601,7 @@ class StreamingFCLayer_Batch(HLSCustomOp): # TODO find a better way of checking for no pregenerated thresholds self.code_gen_dict["$GLOBALS$"] += ['#include "thresh.h"'] - def defines(self): + def defines(self, var): numReps = 1 self.code_gen_dict["$DEFINES$"] = [ """#define MW1 {}\n #define MH1 {}\n #define SIMD1 {}\n @@ -417,9 +616,12 @@ class StreamingFCLayer_Batch(HLSCustomOp): numReps, ) ] + if var == "ipgen": + self.code_gen_dict["$DEFINES$"].append("#define PRAGMA_SUB(x) _Pragma (#x)") + self.code_gen_dict["$DEFINES$"].append("#define DO_PRAGMA(x) PRAGMA_SUB(x)") def read_npy_data(self): - code_gen_dir = self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") dtype = self.get_input_datatype() if dtype == DataType.BIPOLAR: # use binary for bipolar storage @@ -431,8 +633,9 @@ class StreamingFCLayer_Batch(HLSCustomOp): npy_type = "float" npy_in = "%s/input_0.npy" % code_gen_dir self.code_gen_dict["$READNPYDATA$"] = [] + # note: the innermost dim is reversed for the input self.code_gen_dict["$READNPYDATA$"].append( - 'npy2apintstream<%s, %s, %d, %s>("%s", in0);' + 'npy2apintstream<%s, %s, %d, %s>("%s", in0, false);' % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in) ) @@ -466,7 +669,7 @@ class StreamingFCLayer_Batch(HLSCustomOp): ] def dataoutstrm(self): - code_gen_dir = self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") dtype = self.get_output_datatype() if dtype == DataType.BIPOLAR: # use binary for bipolar storage @@ -481,8 +684,9 @@ class StreamingFCLayer_Batch(HLSCustomOp): shape = (1, nf, self.get_nodeattr("PE")) shape_cpp_str = str(shape).replace("(", "{").replace(")", "}") + # note: the innermost dim is not reversed for the output 
self.code_gen_dict["$DATAOUTSTREAM$"] = [ - 'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s");' + 'apintstream2npy<%s, %s, %d, %s>(out, %s, "%s", false);' % ( packed_hls_type, elem_hls_type, @@ -495,3 +699,56 @@ class StreamingFCLayer_Batch(HLSCustomOp): def save_as_npy(self): self.code_gen_dict["$SAVEASCNPY$"] = [] + + def blackboxfunction(self): + self.code_gen_dict["$BLACKBOXFUNCTION$"] = [ + """void {}(hls::stream<ap_uint<{}>> &in0, + hls::stream<ap_uint<{}>> &out + )""".format( + self.onnx_node.name, + self.get_instream_width(), + self.get_outstream_width(), + ) + ] + + def pragmas(self): + self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"] + self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out") + in_fifo_depth = self.get_nodeattr("inFIFODepth") + out_fifo_depth = self.get_nodeattr("outFIFODepth") + # insert depth pragmas only if specified + if in_fifo_depth != 0: + self.code_gen_dict["$PRAGMAS$"].append( + "#pragma HLS stream depth=%d variable=in0" % in_fifo_depth + ) + if out_fifo_depth != 0: + self.code_gen_dict["$PRAGMAS$"].append( + "#pragma HLS stream depth=%d variable=out" % out_fifo_depth + ) + self.code_gen_dict["$PRAGMAS$"].append( + "#pragma HLS INTERFACE ap_ctrl_none port=return" + ) + # the weight tensor is ap_uint<simd*prec> [PE][WMEM] + # partition for parallel access along the PE dimension (dim 1) + self.code_gen_dict["$PRAGMAS$"].append( + ( + "DO_PRAGMA(HLS ARRAY_PARTITION " + "variable=weights.m_weights complete dim=1)" + ) + ) + # the threshold tensor is acc_type [PE][TMEM][N_THRES] + # partition for parallel access along PE and N_THRES dimensions (dims 1 and 3) + if self.calc_tmem() != 0: + # TODO find a better way of checking for no pregenerated thresholds + self.code_gen_dict["$PRAGMAS$"].append( + ( + "DO_PRAGMA(HLS ARRAY_PARTITION variable=threshs.m_thresholds " + "complete dim=1)" + ) + ) + self.code_gen_dict["$PRAGMAS$"].append( + ( + "DO_PRAGMA(HLS ARRAY_PARTITION 
variable=threshs.m_thresholds " + "complete dim=3)" + ) + ) diff --git a/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py b/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py index 92f499b6771efe4455e7259a8eb62ab9c636cb1f..43951332d3637b548093958124735a45cb0edbc4 100644 --- a/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py +++ b/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py @@ -1,12 +1,39 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ from finn.custom_op.fpgadataflow import HLSCustomOp class StreamingMaxPool_Batch(HLSCustomOp): + """Class that corresponds to finn-hlslib StreamingMaxPool_batch function.""" + def get_nodeattr_types(self): my_attrs = { - # "backend": ("s", True, "fpgadataflow"), - # "code_gen_dir": ("s", True, ""), - # "executable_path": ("s", True, ""), "ImgDim": ("i", True, 0), "PoolDim": ("i", True, 0), "NumChannels": ("i", True, 0), @@ -51,7 +78,7 @@ class StreamingMaxPool_Batch(HLSCustomOp): # verify that all necessary attributes exist try: - self.get_nodeattr("code_gen_dir") + self.get_nodeattr("code_gen_dir_npysim") self.get_nodeattr("executable_path") self.get_nodeattr("ImgDim") self.get_nodeattr("PoolDim") @@ -61,7 +88,7 @@ class StreamingMaxPool_Batch(HLSCustomOp): info_messages.append( """The necessary attributes do not exist. StreamingMaxPool_Batch needs the following attributes: - code_gen_dir, executable_path, ImgDim, PoolDim, NumChannels""" + code_gen_dir_npysim, executable_path, ImgDim, PoolDim, NumChannels""" ) # verify the number of inputs @@ -72,10 +99,19 @@ class StreamingMaxPool_Batch(HLSCustomOp): return info_messages + def get_number_output_values(self): + pass + + def bram_estimation(self): + pass + + def lut_estimation(self): + pass + def global_includes(self): self.code_gen_dict["$GLOBALS$"] = ['#include "maxpool.h"'] - def defines(self): + def defines(self, var): numReps = 2 self.code_gen_dict["$DEFINES$"] = [ """#define ImgDim {}\n #define PoolDim {}\n @@ -89,7 +125,7 @@ class StreamingMaxPool_Batch(HLSCustomOp): def read_npy_data(self): node = self.onnx_node - code_gen_dir = self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") # c++ code to read out an npy file # and put it in hls::stream in the correct order self.code_gen_dict["$READNPYDATA$"] = [] @@ -188,7 +224,7 @@ class StreamingMaxPool_Batch(HLSCustomOp): self.code_gen_dict["$DATAOUTSTREAM$"].append("}") def save_as_npy(self): - code_gen_dir = 
self.get_nodeattr("code_gen_dir") + code_gen_dir = self.get_nodeattr("code_gen_dir_npysim") numReps = 1 self.code_gen_dict["$SAVEASCNPY$"] = [ """cnpy::npy_save("{}/output.npy",&output_data_vector[0], @@ -200,3 +236,9 @@ class StreamingMaxPool_Batch(HLSCustomOp): int(self.get_nodeattr("ImgDim") / self.get_nodeattr("PoolDim")), ) ] + + def blackboxfunction(self): + pass + + def pragmas(self): + pass diff --git a/src/finn/custom_op/fpgadataflow/templates.py b/src/finn/custom_op/fpgadataflow/templates.py new file mode 100644 index 0000000000000000000000000000000000000000..bad2ba2eff8ebba8e1a16f5e6e1174b3348c1e56 --- /dev/null +++ b/src/finn/custom_op/fpgadataflow/templates.py @@ -0,0 +1,104 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# template for single node execution +docompute_template = """ +#define AP_INT_MAX_W 4096 +#include "cnpy.h" +#include "npy2apintstream.hpp" +#include <vector> +#include "bnn-library.h" +// includes for network parameters +$GLOBALS$ + +// defines for network parameters +$DEFINES$ + +int main(){ + +$STREAMDECLARATIONS$ + +$READNPYDATA$ + +$DOCOMPUTE$ + +$DATAOUTSTREAM$ + +$SAVEASCNPY$ + +} + +""" + +# templates for single node ip generation + +# cpp file +ipgen_template = """ +#define AP_INT_MAX_W 4096 +#include "bnn-library.h" +// includes for network parameters +$GLOBALS$ + +// defines for network parameters +$DEFINES$ + +$BLACKBOXFUNCTION$ +{ +$PRAGMAS$ +$DOCOMPUTE$ +} +""" + +# tcl script +ipgentcl_template = """ +set config_proj_name $PROJECTNAME$ +puts "HLS project: $config_proj_name" +set config_hwsrcdir "$HWSRCDIR$" +puts "HW source dir: $config_hwsrcdir" +set config_proj_part "$FPGAPART$" + +set config_bnnlibdir "$FINNHLSLIBDIR$" + +set config_toplevelfxn "$TOPFXN$" +set config_clkperiod $CLKPERIOD$ + +open_project $config_proj_name +add_files $config_hwsrcdir/top_$TOPFXN$.cpp -cflags "-std=c++0x -I$config_bnnlibdir" + +set_top $config_toplevelfxn +open_solution sol1 +set_part $config_proj_part + +config_interface -m_axi_addr64 +config_rtl -auto_prefix + +create_clock -period $config_clkperiod -name default +csynth_design +export_design -format ip_catalog +exit 0 +""" diff --git a/src/finn/custom_op/fpgadataflow/tlastmarker.py 
b/src/finn/custom_op/fpgadataflow/tlastmarker.py new file mode 100644 index 0000000000000000000000000000000000000000..c0f599958affc9a3530431506a886919bc3117f1 --- /dev/null +++ b/src/finn/custom_op/fpgadataflow/tlastmarker.py @@ -0,0 +1,151 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +from finn.custom_op.fpgadataflow import HLSCustomOp + + +class TLastMarker(HLSCustomOp): + """Class that corresponds to the TLastMarker node that needs to be + inserted at the end of the model for rtlsim with stitched IP. + It marks the end of the current image/input sample.""" + + def __init__(self, onnx_node): + super().__init__(onnx_node) + + def get_nodeattr_types(self): + my_attrs = { + "NumIters": ("i", True, 0), + # width of input-output data streams, in bits + "StreamWidth": ("i", True, 0), + # width of individual element in stream, in bits + "ElemWidth": ("i", True, 0), + } + my_attrs.update(super().get_nodeattr_types()) + return my_attrs + + def execute_node(self, context, graph): + # TLastMarker's behavior is only visible when doing + # rtlsim with stitched IP, since it marks the end + # of the current image/input sample. when executing + # inside FINN as a single node, this is not visible. + # so here we simply return the input as output + i_name = self.onnx_node.input[0] + o_name = self.onnx_node.output[0] + i_tensor = context[i_name] + context[o_name] = i_tensor + + def make_shape_compatible_op(self): + # not supported for shape inference + pass + + def infer_node_datatype(self, model): + # not supported for datatype inference + pass + + def verify_node(self): + # TODO implement verify_node for TLastMarker + pass + + def global_includes(self): + self.code_gen_dict["$GLOBALS$"] = ['#include "ap_axi_sdata.h"'] + + def defines(self, var): + stream_width = self.get_nodeattr("StreamWidth") + # output stream must have TLAST, so we use this stream data type: + # qdma_axis<stream_data_width,0,0,0 > + out_stream_dtype = "qdma_axis<%d,0,0,0>" % stream_width + self.code_gen_dict["$DEFINES$"] = [ + "#define StreamWidth %d" % stream_width, + "#define OutDType %s" % out_stream_dtype, + "#define NumIters %d" % self.get_nodeattr("NumIters"), + ] + + def read_npy_data(self): + self.code_gen_dict["$READNPYDATA$"] = [] + + def docompute(self): + 
self.code_gen_dict["$DOCOMPUTE$"] = [ + "for(int i=0; i<NumIters; i++) {", + "#pragma HLS PIPELINE II=1", + "OutDType t;", + "t.set_data(in0.read());", + "t.set_keep(-1);", + "t.set_last(i==(NumIters-1));", + "out.write(t);", + "}", + ] + + def dataoutstrm(self): + self.code_gen_dict["$DATAOUTSTREAM$"] = [] + + def save_as_npy(self): + self.code_gen_dict["$SAVEASCNPY$"] = [] + + def blackboxfunction(self): + self.code_gen_dict["$BLACKBOXFUNCTION$"] = [ + """void %s(hls::stream<ap_uint<StreamWidth> > &in0, + hls::stream<OutDType> &out)""" + % self.onnx_node.name + ] + + def pragmas(self): + self.code_gen_dict["$PRAGMAS$"] = ["#pragma HLS INTERFACE axis port=in0"] + self.code_gen_dict["$PRAGMAS$"].append("#pragma HLS INTERFACE axis port=out") + self.code_gen_dict["$PRAGMAS$"].append( + "#pragma HLS INTERFACE ap_ctrl_none port=return" + ) + + def get_number_output_values(self): + return self.get_nodeattr("NumIters") + + def get_folded_input_shape(self): + stream_width = self.get_nodeattr("StreamWidth") + elem_width = self.get_nodeattr("ElemWidth") + n_packed_elems = stream_width // elem_width + n_iters = self.get_nodeattr("NumIters") + return (1, n_iters, n_packed_elems) + + def get_folded_output_shape(self): + return self.get_folded_input_shape() + + def get_instream_width(self): + stream_width = self.get_nodeattr("StreamWidth") + return stream_width + + def get_outstream_width(self): + stream_width = self.get_nodeattr("StreamWidth") + return stream_width + + def strm_decl(self): + self.code_gen_dict["$STREAMDECLARATIONS$"] = [] + self.code_gen_dict["$STREAMDECLARATIONS$"].append( + 'hls::stream<ap_uint<{}>> in0 ("in0");'.format(self.get_instream_width()) + ) + self.code_gen_dict["$STREAMDECLARATIONS$"].append( + 'hls::stream<OutDType> out ("out");' + ) diff --git a/src/finn/custom_op/multithreshold.py b/src/finn/custom_op/multithreshold.py index 52cf2504b174b06df1ba0aa0bdac112fee872b91..73bdbc4177867350eecf75cef0943b01522e8508 100644 --- 
a/src/finn/custom_op/multithreshold.py +++ b/src/finn/custom_op/multithreshold.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np import onnx.helper as helper @@ -6,6 +34,9 @@ from finn.custom_op import CustomOp def compare(x, y): + """Comparison helper function for multithresholding. 
+ + Gets two values and returns 1.0 if x>=y otherwise 0.0.""" if x >= y: return 1.0 else: @@ -13,6 +44,12 @@ def compare(x, y): def multithreshold(v, thresholds, out_scale=None, out_bias=None): + """Given a set of threshold values t={t_0, t_1 ... t_n} the successive + thresholding maps any real number x to an integer in the interval [0, n], + where the returned integer is the number of thresholds x is greater than + or equal to. + + The output tensor will be scaled by out_scale and biased by out_bias.""" # the inputs are expected to be in the shape (N,C,H,W) # N : Batch size # C : Number of channels @@ -27,7 +64,10 @@ def multithreshold(v, thresholds, out_scale=None, out_bias=None): # the output tensor will be scaled by out_scale and biased by out_bias # assert threshold shape is_global_threshold = thresholds.shape[0] == 1 - assert (v.shape[1] == thresholds.shape[0]) or is_global_threshold + assert ( + v.shape[1] == thresholds.shape[0] + ) or is_global_threshold, """"Threshold + shape incorrect""" # save the required shape sizes for the loops (N, C and B) num_batch = v.shape[0] num_channel = v.shape[1] @@ -57,6 +97,8 @@ def multithreshold(v, thresholds, out_scale=None, out_bias=None): class MultiThreshold(CustomOp): + """Class that corresponds to a multithresholding node.""" + def get_nodeattr_types(self): return { "out_dtype": ("s", True, ""), diff --git a/src/finn/custom_op/registry.py b/src/finn/custom_op/registry.py index fdc4ab6e227c25b94bd5e333dc12d73e7411b681..305618ab1bb9e64f761fd8498ede815a1c85d47c 100644 --- a/src/finn/custom_op/registry.py +++ b/src/finn/custom_op/registry.py @@ -1,12 +1,42 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ # make sure new CustomOp subclasses are imported here so that they get # registered and plug in correctly into the infrastructure from finn.custom_op.fpgadataflow.convolutioninputgenerator import ( - ConvolutionInputGenerator + ConvolutionInputGenerator, ) from finn.custom_op.fpgadataflow.streamingfclayer_batch import StreamingFCLayer_Batch from finn.custom_op.fpgadataflow.streamingmaxpool_batch import StreamingMaxPool_Batch from finn.custom_op.im2col import Im2Col +from finn.custom_op.fpgadataflow.tlastmarker import TLastMarker from finn.custom_op.multithreshold import MultiThreshold +from finn.custom_op.streamingdataflowpartition import StreamingDataflowPartition from finn.custom_op.xnorpopcount import XnorPopcountMatMul # create a mapping of all known CustomOp names and classes @@ -18,3 +48,17 @@ custom_op["Im2Col"] = Im2Col custom_op["StreamingMaxPool_Batch"] = StreamingMaxPool_Batch custom_op["StreamingFCLayer_Batch"] = StreamingFCLayer_Batch custom_op["ConvolutionInputGenerator"] = ConvolutionInputGenerator +custom_op["TLastMarker"] = TLastMarker +custom_op["StreamingDataflowPartition"] = StreamingDataflowPartition + + +def getCustomOp(node): + "Return a FINN CustomOp instance for the given ONNX node, if it exists." + op_type = node.op_type + try: + # lookup op_type in registry of CustomOps + inst = custom_op[op_type](node) + return inst + except KeyError: + # exception if op_type is not supported + raise Exception("Custom op_type %s is currently not supported." % op_type) diff --git a/src/finn/custom_op/streamingdataflowpartition.py b/src/finn/custom_op/streamingdataflowpartition.py new file mode 100644 index 0000000000000000000000000000000000000000..586537460f9bf4a10bb581218a745c4a99fba6f6 --- /dev/null +++ b/src/finn/custom_op/streamingdataflowpartition.py @@ -0,0 +1,93 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from finn.custom_op import CustomOp + + +class StreamingDataflowPartition(CustomOp): + """Class that corresponds to the meta/container node StreamingDataflowPartition + which is a placeholder for a group of fpgadataflow nodes that have been separated + out into a FINN-ONNX model of its own. 
Note that it does not produce any HLS + bitfile by itself.""" + + def get_nodeattr_types(self): + return { + "model": ("s", True, ""), + } + + def make_shape_compatible_op(self): + pass + + def infer_node_datatype(self, model): + pass + + def execute_node(self, context, graph): + # TODO add RPC execution with synthesized bitfile? + # whole-design rtlsim with PyVerilator may also be an alternative + pass + + def verify_node(self): + info_messages = [] + + # verify number of attributes + num_of_attr = 1 + if len(self.onnx_node.attribute) == num_of_attr: + info_messages.append("The number of attributes is correct") + else: + info_messages.append( + """The number of attributes is incorrect, + {} should have {} attributes""".format( + self.onnx_node.op_type, num_of_attr + ) + ) + + # verify that "domain" is set to "finn" + domain_value = self.onnx_node.domain + if domain_value == "finn": + info_messages.append("Attribute domain is set correctly") + else: + info_messages.append('Attribute domain should be set to "finn"') + + # verify that all necessary attributes exist + try: + self.get_nodeattr("model") + info_messages.append("All necessary attributes exist") + except Exception: + info_messages.append( + """The necessary attributes do not exist. + StreamingDataflowPartition needs the following attribute(s): + model""" + ) + + # verify the number of inputs + if len(self.onnx_node.input) == 1: + info_messages.append("The number of inputs is correct") + else: + info_messages.append("StreamingDataflowPartition needs 1 data input") + + return info_messages diff --git a/src/finn/custom_op/xnorpopcount.py b/src/finn/custom_op/xnorpopcount.py index 15ec57a002148a4e9213d28fe4f68c5a78837ab6..511a120b1c3894595dc068995d14862b54a73c7a 100644 --- a/src/finn/custom_op/xnorpopcount.py +++ b/src/finn/custom_op/xnorpopcount.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np import onnx.helper as helper @@ -6,15 +34,15 @@ from finn.custom_op import CustomOp def xnorpopcountmatmul(inp0, inp1): + """Simulates XNOR-popcount matrix multiplication as a regular bipolar + matrix multiplication followed by some post processing.""" # extract the operand shapes (M, K0) = inp0.shape (K1, N) = inp1.shape # make sure shapes are compatible with matmul - assert K0 == K1 + assert K0 == K1, "Matrix shapes are not compatible with matmul." 
K = K0 - # we simulate XNOR-popcount matrix multiplication as a regular bipolar - # matrix multiplication followed by some post processing - # first, convert binary inputs to bipolar + # convert binary inputs to bipolar inp0_bipolar = 2.0 * inp0 - 1.0 inp1_bipolar = 2.0 * inp1 - 1.0 # call regular numpy matrix multiplication @@ -31,6 +59,9 @@ def xnorpopcountmatmul(inp0, inp1): class XnorPopcountMatMul(CustomOp): + """Class that corresponds to a XNOR-popcount matrix + multiplication node.""" + def get_nodeattr_types(self): return {} @@ -43,8 +74,14 @@ class XnorPopcountMatMul(CustomOp): def infer_node_datatype(self, model): node = self.onnx_node # ensure inputs are binary - assert model.get_tensor_datatype(node.input[0]) == DataType["BINARY"] - assert model.get_tensor_datatype(node.input[1]) == DataType["BINARY"] + assert ( + model.get_tensor_datatype(node.input[0]) == DataType["BINARY"] + ), """FINN + DataType of first input is not set to BINARY as it should be.""" + assert ( + model.get_tensor_datatype(node.input[1]) == DataType["BINARY"] + ), """FINN + DataTypes of second input is not set to BINARY as it should be.""" # XNOR-popcount produces unsigned integers, assume uint32 model.set_tensor_datatype(node.output[0], DataType["UINT32"]) diff --git a/src/finn/data/cpp/npy2apintstream.hpp b/src/finn/data/cpp/npy2apintstream.hpp index f58566fb1783bbdf1e0cdbb2f69c6bd17d916e57..b1c563d1bc33bbcd1a17f49fdc644be16f5f9730 100644 --- a/src/finn/data/cpp/npy2apintstream.hpp +++ b/src/finn/data/cpp/npy2apintstream.hpp @@ -13,7 +13,7 @@ #endif template <typename PackedT, typename ElemT, int ElemBits, typename NpyT> -void npy2apintstream(const char * npy_path, hls::stream<PackedT> & out_stream) { +void npy2apintstream(const char * npy_path, hls::stream<PackedT> & out_stream, bool reverse_inner = true) { cnpy::NpyArray arr = cnpy::npy_load(npy_path); DEBUG_NPY2APINTSTREAM("word_size " << arr.word_size << " num_vals " << arr.num_vals) if(arr.word_size != sizeof(NpyT)) { @@ -28,7 
+28,8 @@ void npy2apintstream(const char * npy_path, hls::stream<PackedT> & out_stream) { DEBUG_NPY2APINTSTREAM("n_outer " << outer_dim_elems << " n_inner " << inner_dim_elems) for(size_t outer_elem = 0; outer_elem < outer_dim_elems; outer_elem++) { PackedT packed_elem = 0; - for(size_t i = 0; i < inner_dim_elems; i++) { + for(size_t ii = 0; ii < inner_dim_elems; ii++) { + size_t i = reverse_inner ? inner_dim_elems-ii-1 : ii; NpyT loaded_elem_npyt = *loaded_data; ElemT loaded_elem = (ElemT) loaded_elem_npyt; DEBUG_NPY2APINTSTREAM("NpyT " << loaded_elem_npyt << " elem " << loaded_elem) @@ -41,19 +42,20 @@ void npy2apintstream(const char * npy_path, hls::stream<PackedT> & out_stream) { } template <typename PackedT, typename ElemT, int ElemBits, typename NpyT> -void apintstream2npy(hls::stream<PackedT> & in_stream, const std::vector<size_t> & shape, const char * npy_path) { +void apintstream2npy(hls::stream<PackedT> & in_stream, const std::vector<size_t> & shape, const char * npy_path, bool reverse_inner = true) { std::vector<NpyT> data_to_save; size_t outer_dim_elems = 1; for(size_t dim = 0; dim < shape.size()-1; dim++) { outer_dim_elems *= shape[dim]; } size_t inner_dim_elems = shape[shape.size()-1]; - DEBUG_NPY2APINTSTREAM("n_outer " << outer_dim_elems << " n_inner " << inner_dim_elems) + DEBUG_APINTSTREAM2NPY("n_outer " << outer_dim_elems << " n_inner " << inner_dim_elems) for(size_t outer_elem = 0; outer_elem < outer_dim_elems; outer_elem++) { PackedT packed_elem; in_stream >> packed_elem; - DEBUG_NPY2APINTSTREAM("packed hls elem " << std::hex << packed_elem << std::dec) - for(size_t i = 0; i < inner_dim_elems; i++) { + DEBUG_APINTSTREAM2NPY("packed hls elem " << std::hex << packed_elem << std::dec) + for(size_t ii = 0; ii < inner_dim_elems; ii++) { + size_t i = reverse_inner ? 
inner_dim_elems-ii-1 : ii; ElemT elem = packed_elem((i+1)*ElemBits-1, i*ElemBits); NpyT npyt = (NpyT) elem; DEBUG_APINTSTREAM2NPY("elem " << elem << " NpyT " << npyt) diff --git a/src/finn/data/onnx/finn-hls-model/tfc_w1_a1_after_conv_to_hls.onnx b/src/finn/data/onnx/finn-hls-model/tfc_w1_a1_after_conv_to_hls.onnx new file mode 100644 index 0000000000000000000000000000000000000000..aada6f07e9d3910122d2eb357d8a8c1224e9fbab Binary files /dev/null and b/src/finn/data/onnx/finn-hls-model/tfc_w1_a1_after_conv_to_hls.onnx differ diff --git a/src/finn/transformation/__init__.py b/src/finn/transformation/__init__.py index 3ddce04c11db01b2f6722bc843f0107621630936..a4e0bcf330a8ad1797eb76e61ba63511eb903dcf 100644 --- a/src/finn/transformation/__init__.py +++ b/src/finn/transformation/__init__.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + """ Guide to writing FINN transformations ------------------------------------- @@ -23,6 +51,9 @@ from abc import ABC, abstractmethod class Transformation(ABC): + """Transformation class all transformations are based on. Contains only + abstract method apply() every transformation has to fill.""" + def __init__(self): super().__init__() diff --git a/src/finn/transformation/batchnorm_to_affine.py b/src/finn/transformation/batchnorm_to_affine.py index 655ddd9842f37d59155aa0b12edeffecd89d65c1..77657cf5e2ef14e38aa817e895488fd6dd310cde 100644 --- a/src/finn/transformation/batchnorm_to_affine.py +++ b/src/finn/transformation/batchnorm_to_affine.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import TensorProto from onnx import helper as oh @@ -67,8 +95,10 @@ class BatchNormToAffine(Transformation): # remove old nodes graph.node.remove(n) if consumer is not None: - graph.node.remove(consumer) + if consumer.op_type == "Squeeze": + graph.node.remove(consumer) if producer is not None: - graph.node.remove(producer) + if producer.op_type == "Unsqueeze": + graph.node.remove(producer) model = model.transform(InferShapes()) return (model, graph_modified) diff --git a/src/finn/transformation/bipolar_to_xnor.py b/src/finn/transformation/bipolar_to_xnor.py index ad785699f607cab0608884828795cd0f5e7017d3..8e8633f3836ec15c35c769e0ed6d6829fed37538 100644 --- a/src/finn/transformation/bipolar_to_xnor.py +++ b/src/finn/transformation/bipolar_to_xnor.py @@ -1,11 +1,39 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import TensorProto from onnx import helper as oh from finn.core.datatype import DataType -from finn.core.utils import get_by_name from finn.transformation import Transformation from finn.transformation.infer_shapes import InferShapes +from finn.util.basic import get_by_name class ConvertBipolarMatMulToXnorPopcount(Transformation): diff --git a/src/finn/transformation/fold_constants.py b/src/finn/transformation/fold_constants.py index 5b27d906cc8ee4cbcaf7363001eb7297b5c21000..a73035e571fe0ce0425c3d8da288f755984268d4 100644 --- a/src/finn/transformation/fold_constants.py +++ b/src/finn/transformation/fold_constants.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ import finn.core.onnx_exec as oxe from finn.transformation import Transformation from finn.transformation.infer_shapes import InferShapes diff --git a/src/finn/transformation/fpgadataflow/__init__.py b/src/finn/transformation/fpgadataflow/__init__.py index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..83c8e8bed70797f7d6c0138968f750f72e790386 100644 --- a/src/finn/transformation/fpgadataflow/__init__.py +++ b/src/finn/transformation/fpgadataflow/__init__.py @@ -0,0 +1,27 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/src/finn/transformation/fpgadataflow/cleanup.py b/src/finn/transformation/fpgadataflow/cleanup.py index 1632d3443a3bf79e55a4b877ae182964ff7caaed..e1bf53f7ef53c986fffe3dcc507e6886660eb611 100644 --- a/src/finn/transformation/fpgadataflow/cleanup.py +++ b/src/finn/transformation/fpgadataflow/cleanup.py @@ -1,8 +1,36 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import os import shutil -import finn.core.utils as util import finn.custom_op.registry as registry +import finn.util.basic as util from finn.transformation import Transformation @@ -13,6 +41,16 @@ class CleanUp(Transformation): super().__init__() def apply(self, model): + # delete PYNQ project, if any + vivado_pynq_proj_dir = model.get_metadata_prop("vivado_pynq_proj") + if vivado_pynq_proj_dir is not None and os.path.isdir(vivado_pynq_proj_dir): + shutil.rmtree(vivado_pynq_proj_dir) + model.set_metadata_prop("vivado_pynq_proj", "") + # delete IP stitching project, if any + ipstitch_path = model.get_metadata_prop("vivado_stitch_proj") + if ipstitch_path is not None and os.path.isdir(ipstitch_path): + shutil.rmtree(ipstitch_path) + model.set_metadata_prop("vivado_stitch_proj", "") for node in model.graph.node: op_type = node.op_type if node.domain == "finn": @@ -22,11 +60,26 @@ class CleanUp(Transformation): try: # lookup op_type in registry of CustomOps inst = registry.custom_op[op_type](node) - code_gen_dir = inst.get_nodeattr("code_gen_dir") + # delete code_gen_dir from npysim + code_gen_dir = inst.get_nodeattr("code_gen_dir_npysim") if os.path.isdir(code_gen_dir): shutil.rmtree(code_gen_dir) - inst.set_nodeattr("code_gen_dir", "") + inst.set_nodeattr("code_gen_dir_npysim", "") inst.set_nodeattr("executable_path", "") + # delete code_gen_dir from ipgen and project folder + code_gen_dir = inst.get_nodeattr("code_gen_dir_ipgen") + 
ipgen_path = inst.get_nodeattr("ipgen_path") + if os.path.isdir(code_gen_dir): + shutil.rmtree(code_gen_dir) + if os.path.isdir(ipgen_path): + shutil.rmtree(ipgen_path) + inst.set_nodeattr("code_gen_dir_ipgen", "") + inst.set_nodeattr("ipgen_path", "") + # delete Java HotSpot Performance data log + for d_name in os.listdir("/tmp/"): + if "hsperfdata" in d_name: + shutil.rmtree("/tmp/" + str(d_name)) + except KeyError: # exception if op_type is not supported raise Exception( diff --git a/src/finn/transformation/fpgadataflow/codegen.py b/src/finn/transformation/fpgadataflow/codegen.py deleted file mode 100644 index 84078d90a573faf4d014c5e280e22e41061f0aff..0000000000000000000000000000000000000000 --- a/src/finn/transformation/fpgadataflow/codegen.py +++ /dev/null @@ -1,41 +0,0 @@ -import os -import tempfile as tmp - -import finn.custom_op.registry as registry -from finn.core.utils import get_by_name -from finn.transformation import Transformation - - -def _codegen_single_node(node, model): - """Call custom implementation to generate code for single custom node - and create folder that contains all the generated files""" - op_type = node.op_type - try: - # lookup op_type in registry of CustomOps - inst = registry.custom_op[op_type](node) - # get the path of the code generation directory - code_gen_dir = inst.get_nodeattr("code_gen_dir") - # ensure that there is a directory - if code_gen_dir == "" or not os.path.isdir(code_gen_dir): - code_gen_dir = tmp.mkdtemp(prefix="code_gen_" + str(node.op_type) + "_") - inst.set_nodeattr("code_gen_dir", code_gen_dir) - # ensure that there is generated code inside the dir - inst.code_generation(model) - except KeyError: - # exception if op_type is not supported - raise Exception("Custom op_type %s is currently not supported." 
% op_type) - - -class CodeGen(Transformation): - """Code generation for all nodes in model""" - - def apply(self, model): - for node in model.graph.node: - if node.domain == "finn": - backend_attribute = get_by_name(node.attribute, "backend") - if backend_attribute is None: - continue - backend_value = backend_attribute.s.decode("UTF-8") - if backend_value == "fpgadataflow": - _codegen_single_node(node, model) - return (model, False) diff --git a/src/finn/transformation/fpgadataflow/codegen_ipgen.py b/src/finn/transformation/fpgadataflow/codegen_ipgen.py new file mode 100644 index 0000000000000000000000000000000000000000..ab3b4e820e04ff1f0a02b6b95254b5fe8b45de91 --- /dev/null +++ b/src/finn/transformation/fpgadataflow/codegen_ipgen.py @@ -0,0 +1,87 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os + +import finn.custom_op.registry as registry +from finn.transformation import Transformation +from finn.util.basic import get_by_name, make_build_dir + + +def _codegen_single_node(node, model, fpgapart, clk): + """Calls C++ code generation for one node. Resulting code can be used + to generate a Vivado IP block for the node.""" + + op_type = node.op_type + try: + # lookup op_type in registry of CustomOps + inst = registry.custom_op[op_type](node) + # get the path of the code generation directory + code_gen_dir = inst.get_nodeattr("code_gen_dir_ipgen") + # ensure that there is a directory + if code_gen_dir == "" or not os.path.isdir(code_gen_dir): + code_gen_dir = make_build_dir( + prefix="code_gen_ipgen_" + str(node.op_type) + "_" + ) + inst.set_nodeattr("code_gen_dir_ipgen", code_gen_dir) + # ensure that there is generated code inside the dir + inst.code_generation_ipgen(model, fpgapart, clk) + except KeyError: + # exception if op_type is not supported + raise Exception("Custom op_type %s is currently not supported." % op_type) + + +class CodeGen_ipgen(Transformation): + """Call custom implementation to generate code for single custom node + and create folder that contains all the generated files. 
+ All nodes in the graph must have the fpgadataflow backend attribute and + the transformation gets additional arguments: + + * fpgapart (string) + + * clk in ns (int) + + Outcome if successful: Node attribute "code_gen_dir_ipgen" contains path to folder + that contains generated C++ code that can be used to generate a Vivado IP block. + The subsequent transformation is HLSSynth_IPGen""" + + def __init__(self, fpgapart, clk): + super().__init__() + self.fpgapart = fpgapart + self.clk = clk + + def apply(self, model): + for node in model.graph.node: + if node.domain == "finn": + backend_attribute = get_by_name(node.attribute, "backend") + if backend_attribute is None: + continue + backend_value = backend_attribute.s.decode("UTF-8") + if backend_value == "fpgadataflow": + _codegen_single_node(node, model, self.fpgapart, self.clk) + return (model, False) diff --git a/src/finn/transformation/fpgadataflow/codegen_ipstitch.py b/src/finn/transformation/fpgadataflow/codegen_ipstitch.py new file mode 100644 index 0000000000000000000000000000000000000000..fcb4af37c951de3869b731e755ef48ba4fdb579f --- /dev/null +++ b/src/finn/transformation/fpgadataflow/codegen_ipstitch.py @@ -0,0 +1,200 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
import os
import subprocess

from finn.transformation import Transformation
from finn.util.basic import get_by_name, make_build_dir


class CodeGen_ipstitch(Transformation):
    """Create a Vivado IP Block Design project from all the generated IPs of a
    graph. All nodes in the graph must have the fpgadataflow backend attribute,
    and the CodeGen_ipgen transformation must have been previously run on
    the graph. The resulting block design is also packaged as IP. The
    transformation gets the fpgapart as a string.

    Outcome if successful: sets the vivado_stitch_proj attribute in the ONNX
    ModelProto's metadata_props field, with the created project dir as the
    value. A make_project.tcl script is also placed under the same folder,
    which is called to instantiate the per-layer IPs and stitch them together.
    The packaged block design IP can be found under the ip subdirectory.
    """

    def __init__(self, fpgapart):
        super().__init__()
        # FPGA part string, passed verbatim to Vivado's create_project -part
        self.fpgapart = fpgapart

    def apply(self, model):
        # "list" is the Tcl list-builder keyword: ip_dirs is later joined into
        # a "[list dir1 dir2 ...]" Tcl expression for ip_repo_paths
        ip_dirs = ["list"]
        create_cmds = []  # Tcl commands instantiating one BD cell per node
        connect_cmds = []  # Tcl commands wiring clk/rst and stream interfaces
        # ensure that all nodes are fpgadataflow, and that IPs are generated
        for node in model.graph.node:
            assert node.domain == "finn", 'Node domain is not set to "finn"'
            backend_attribute = get_by_name(node.attribute, "backend")
            assert backend_attribute is not None, "Backend node attribute is not set."
            backend_value = backend_attribute.s.decode("UTF-8")
            assert (
                backend_value == "fpgadataflow"
            ), """Backend node attribute is not
            set to "fpgadataflow"."""
            ip_dir_attribute = get_by_name(node.attribute, "ipgen_path")
            assert (
                ip_dir_attribute is not None
            ), """Node attribute "ipgen_path" is not set.
            Please run transformation CodeGen_ipgen first."""
            ip_dir_value = ip_dir_attribute.s.decode("UTF-8")
            # the packaged per-node HLS IP lives under sol1/impl/ip of the
            # ipgen project directory
            ip_dir_value += "/sol1/impl/ip"
            assert os.path.isdir(ip_dir_value), "IP generation directory doesn't exist."
            ip_dirs += [ip_dir_value]
            # VLNV under which Vivado HLS packages the exported IP
            vlnv = "xilinx.com:hls:%s:1.0" % node.name
            inst_name = node.name
            create_cmd = "create_bd_cell -type ip -vlnv %s %s" % (vlnv, inst_name)
            create_cmds += [create_cmd]
            # TODO nonlinear topologies: check this for all inputs
            my_producer = model.find_producer(node.input[0])
            if my_producer is None:
                # first node in graph
                # make clock and reset external
                connect_cmds.append(
                    "make_bd_pins_external [get_bd_pins %s/ap_clk]" % inst_name
                )
                connect_cmds.append(
                    "make_bd_pins_external [get_bd_pins %s/ap_rst_n]" % inst_name
                )
                # make input external
                connect_cmds.append(
                    "make_bd_intf_pins_external [get_bd_intf_pins %s/in0_V_V]"
                    % inst_name
                )
            else:
                # intermediate node
                # wire up global clock and reset (the _0 ports were created by
                # the make_bd_pins_external calls on the first node)
                connect_cmds.append(
                    "connect_bd_net [get_bd_ports ap_rst_n_0] [get_bd_pins %s/ap_rst_n]"
                    % inst_name
                )
                connect_cmds.append(
                    "connect_bd_net [get_bd_ports ap_clk_0] [get_bd_pins %s/ap_clk]"
                    % inst_name
                )
                # wire up input to previous output
                # TODO nonlinear topologies: loop over all inputs
                my_in_name = "%s/in0_V_V" % (inst_name)
                prev_out_name = "%s/out_V_V" % (my_producer.name)
                connect_cmds.append(
                    "connect_bd_intf_net [get_bd_intf_pins %s] [get_bd_intf_pins %s]"
                    % (prev_out_name, my_in_name)
                )
            if model.find_consumer(node.output[0]) is None:
                # last node in graph
                # ensure it is a TLastMarker to have a valid TLast signal
                assert (
                    node.op_type == "TLastMarker"
                ), """Last node is not TLastMarker.
                Please run transformation InsertTLastMarker to ensure a valid
                TLast signal"""
                # make output external
                connect_cmds.append(
                    "make_bd_intf_pins_external [get_bd_intf_pins %s/out_r]" % inst_name
                )

        # create a temporary folder for the project
        prjname = "finn_vivado_stitch_proj"
        vivado_stitch_proj_dir = make_build_dir(prefix="vivado_stitch_proj_")
        model.set_metadata_prop("vivado_stitch_proj", vivado_stitch_proj_dir)
        # start building the tcl script
        tcl = []
        # create vivado project
        tcl.append(
            "create_project %s %s -part %s"
            % (prjname, vivado_stitch_proj_dir, self.fpgapart)
        )
        # add all the generated IP dirs to ip_repo_paths
        ip_dirs_str = " ".join(ip_dirs)
        tcl.append("set_property ip_repo_paths [%s] [current_project]" % ip_dirs_str)
        tcl.append("update_ip_catalog")
        # create block design and instantiate all layers
        block_name = "finn_design"
        tcl.append('create_bd_design "%s"' % block_name)
        tcl.extend(create_cmds)
        tcl.extend(connect_cmds)
        tcl.append("regenerate_bd_layout")
        tcl.append("validate_bd_design")
        tcl.append("save_bd_design")
        # export block design itself as an IP core
        block_vendor = "xilinx_finn"
        block_library = "finn"
        block_vlnv = "%s:%s:%s:1.0" % (block_vendor, block_library, block_name)
        model.set_metadata_prop("vivado_stitch_vlnv", block_vlnv)
        tcl.append(
            (
                "ipx::package_project -root_dir %s/ip -vendor %s "
                "-library %s -taxonomy /UserIP -module %s -import_files"
            )
            % (vivado_stitch_proj_dir, block_vendor, block_library, block_name)
        )
        tcl.append("set_property core_revision 2 [ipx::find_open_core %s]" % block_vlnv)
        tcl.append("ipx::create_xgui_files [ipx::find_open_core %s]" % block_vlnv)
        tcl.append("ipx::update_checksums [ipx::find_open_core %s]" % block_vlnv)
        tcl.append("ipx::save_core [ipx::find_open_core %s]" % block_vlnv)
        # create wrapper hdl (for rtlsim later on)
        bd_base = "%s/%s.srcs/sources_1/bd/%s" % (
            vivado_stitch_proj_dir,
            prjname,
            block_name,
        )
        bd_filename = "%s/%s.bd" % (bd_base, block_name)
        tcl.append("make_wrapper -files [get_files %s] -top" % bd_filename)
        wrapper_filename = "%s/hdl/%s_wrapper.v" % (bd_base, block_name)
        tcl.append("add_files -norecurse %s" % wrapper_filename)
        model.set_metadata_prop("wrapper_filename", wrapper_filename)
        # export list of used Verilog files (for rtlsim later on)
        tcl.append("set all_v_files [get_files -filter {FILE_TYPE == Verilog}]")
        v_file_list = "%s/all_verilog_srcs.txt" % vivado_stitch_proj_dir
        tcl.append("set fp [open %s w]" % v_file_list)
        tcl.append("puts $fp $all_v_files")
        tcl.append("close $fp")
        # write the project creator tcl script
        tcl_string = "\n".join(tcl) + "\n"
        with open(vivado_stitch_proj_dir + "/make_project.tcl", "w") as f:
            f.write(tcl_string)
        # create a shell script and call Vivado
        # NOTE(review): assumes "vivado" is on PATH and $PWD is set -- the
        # script cd's back to the original working dir after the batch run
        make_project_sh = vivado_stitch_proj_dir + "/make_project.sh"
        working_dir = os.environ["PWD"]
        with open(make_project_sh, "w") as f:
            f.write("#!/bin/bash \n")
            f.write("cd {}\n".format(vivado_stitch_proj_dir))
            f.write("vivado -mode batch -source make_project.tcl\n")
            f.write("cd {}\n".format(working_dir))
        bash_command = ["bash", make_project_sh]
        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
        process_compile.communicate()
        return (model, False)
import os

import finn.custom_op.registry as registry
from finn.transformation import Transformation
from finn.util.basic import get_by_name, make_build_dir


def _codegen_single_node(node, model):
    """Calls C++ code generation for one node. Resulting code can be used
    to simulate node using npysim."""

    op_type = node.op_type
    try:
        # lookup op_type in registry of CustomOps
        inst = registry.custom_op[op_type](node)
    except KeyError:
        # only the registry lookup signals an unsupported op; KeyErrors raised
        # by the code generation itself must propagate unchanged instead of
        # being misreported as "unsupported op_type"
        raise Exception("Custom op_type %s is currently not supported." % op_type)
    # get the path of the code generation directory
    code_gen_dir = inst.get_nodeattr("code_gen_dir_npysim")
    # ensure that there is a directory
    if code_gen_dir == "" or not os.path.isdir(code_gen_dir):
        code_gen_dir = make_build_dir(
            prefix="code_gen_npysim_" + str(node.op_type) + "_"
        )
        inst.set_nodeattr("code_gen_dir_npysim", code_gen_dir)
    # ensure that there is generated code inside the dir
    inst.code_generation_npysim(model)


class CodeGen_npysim(Transformation):
    """Call custom implementation to generate code for single custom node
    and create folder that contains all the generated files.
    All nodes in the graph must have the fpgadataflow backend attribute.

    Outcome if successful: Node attribute "code_gen_dir_npysim" contains path to folder
    that contains generated C++ code that can be used to simulate node using npysim.
    The subsequent transformation is Compile"""

    def apply(self, model):
        # only consider FINN custom nodes marked with the fpgadataflow backend;
        # everything else is left untouched
        for node in model.graph.node:
            if node.domain == "finn":
                backend_attribute = get_by_name(node.attribute, "backend")
                if backend_attribute is None:
                    continue
                backend_value = backend_attribute.s.decode("UTF-8")
                if backend_value == "fpgadataflow":
                    _codegen_single_node(node, model)
        return (model, False)
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import finn.custom_op.registry as registry +import finn.util.basic as util from finn.transformation import Transformation class Compile(Transformation): - """Compile for all nodes in model""" + """For every node: compile C++ code in node attribute "code_gen_dir_npysim" + and save path to executables in node attribute "executable_path". + All nodes in the graph must have the fpgadataflow backend attribute. 
+ + To use these executables, exec_mode must be set to "npysim" (using transformation + SetExecMode) and the model has to be executed using execute_onnx() from + finn.core.onnx_exec""" def __init__(self): super().__init__() @@ -22,11 +56,19 @@ class Compile(Transformation): # lookup op_type in registry of CustomOps inst = registry.custom_op[op_type](node) # ensure that code is generated - assert inst.get_nodeattr("code_gen_dir") != "" + assert ( + inst.get_nodeattr("code_gen_dir_npysim") != "" + ), """Node + attribute "code_gen_dir_npysim" is not set. Please run + Transformation CodeGen_npysim first.""" # call the compilation function for this node inst.compile_singlenode_code() # ensure that executable path is now set - assert inst.get_nodeattr("executable_path") != "" + assert ( + inst.get_nodeattr("executable_path") != "" + ), """Transformation + compile was not successful, there is no path to executables set + in node attribute "executable_path".""" except KeyError: # exception if op_type is not supported raise Exception( diff --git a/src/finn/transformation/fpgadataflow/convert_to_hls_layers.py b/src/finn/transformation/fpgadataflow/convert_to_hls_layers.py index 56f3c0a8bb60ad5fba72e290177c10405a54f2d1..1cc42badc7c6f9ce88d802134596320c84f5ab4f 100644 --- a/src/finn/transformation/fpgadataflow/convert_to_hls_layers.py +++ b/src/finn/transformation/fpgadataflow/convert_to_hls_layers.py @@ -1,11 +1,40 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from onnx import helper from finn.core.datatype import DataType from finn.transformation import Transformation +from finn.custom_op.registry import getCustomOp class InferBinaryStreamingFCLayer(Transformation): - """Convert pairs of binary XnorPopcountMatMul layers to + """Convert XnorPopcountMatMul layers to StreamingFCLayer_Batch layers. 
Any immediately following MultiThreshold layers will also be absorbed into the MVTU.""" @@ -18,8 +47,14 @@ class InferBinaryStreamingFCLayer(Transformation): if n.op_type == "XnorPopcountMatMul": mm_input = n.input[0] mm_weight = n.input[1] - assert model.get_tensor_datatype(mm_input) == DataType.BINARY - assert model.get_tensor_datatype(mm_weight) == DataType.BINARY + assert ( + model.get_tensor_datatype(mm_input) == DataType.BINARY + ), """First + input for xnorpopcount is not set to FINN DataType BINARY.""" + assert ( + model.get_tensor_datatype(mm_weight) == DataType.BINARY + ), """Second + input (weights) for xnorpopcount is not set to FINN DataType BINARY.""" idt = DataType.BINARY wdt = DataType.BINARY mm_output = n.output[0] @@ -33,10 +68,13 @@ class InferBinaryStreamingFCLayer(Transformation): # create node with no parallelization first pe = 1 simd = 1 - assert mh % pe == 0 - assert mw % simd == 0 + assert mh % pe == 0, "Requirement MH divisable by PE is violated." + assert mw % simd == 0, "Requirement MW divisable by SIMD is violated." 
class InferQuantizedStreamingFCLayer(Transformation):
    """Convert MatMul layers with quantized (integer) inputs and weights to
    StreamingFCLayer_Batch layers. Any immediately following MultiThreshold
    layers will also be absorbed into the MVTU (matrix-vector-threshold unit).

    The new node is created with no parallelization (PE = SIMD = 1); folding
    factors can be adjusted afterwards via node attributes."""

    def apply(self, model):
        graph = model.graph
        node_ind = 0
        graph_modified = False
        # NOTE(review): graph.node is mutated while iterating; this relies on
        # protobuf repeated-container iteration semantics -- confirm
        for n in graph.node:
            node_ind += 1
            if n.op_type == "MatMul":
                mm_input = n.input[0]
                mm_weight = n.input[1]
                idt = model.get_tensor_datatype(mm_input)
                wdt = model.get_tensor_datatype(mm_weight)
                # only convert if both input and weights are integer-typed
                if idt.is_integer() and wdt.is_integer():
                    mm_output = n.output[0]
                    W = model.get_initializer(mm_weight)
                    # extract weight shape, note that ONNX and finn-hlslib
                    # make different assumptions about dim order here
                    # ONNX assumes W has (in, out) shape
                    # finn-hlslib assumes W has (out, in) shape
                    mh = int(W.shape[1])
                    mw = int(W.shape[0])
                    # create node with no parallelization first
                    pe = 1
                    simd = 1
                    assert mh % pe == 0, "Requirement MH divisible by PE is violated."
                    assert (
                        mw % simd == 0
                    ), "Requirement MW divisible by SIMD is violated."
                    wmem = mw * mh // (pe * simd)
                    assert (
                        mw * mh == wmem * pe * simd
                    ), """Requirement (MW * MH) divisible by
                    (WMEM * PE * SIMD) is violated."""
                    # see if we have any following thresholds
                    consumer = model.find_consumer(mm_output)
                    if consumer is not None and consumer.op_type == "MultiThreshold":
                        # TODO ensure integer thresholds?
                        # create MVTU (i.e. including activation)
                        mt_output = consumer.output[0]
                        mt_thres = consumer.input[1]
                        T = model.get_initializer(mt_thres)
                        assert (
                            T.shape[0] == 1 or T.shape[0] == mh
                        ), """First dimension of
                        thresholds neither 1 nor MH."""
                        odt = model.get_tensor_datatype(mt_output)
                        # finn-hlslib can only absorb thresholding with
                        # scale 1.0 and an integer bias
                        scale = getCustomOp(consumer).get_nodeattr("out_scale")
                        assert (
                            scale == 1.0
                        ), "out_scale must be equal to 1.0 for HLS conversion."
                        actval = getCustomOp(consumer).get_nodeattr("out_bias")
                        assert (
                            int(actval) == actval
                        ), "out_bias must be integer for HLS conversion."
                        actval = int(actval)
                        assert (not odt.signed()) or (
                            actval < 0
                        ), "Signed output requires actval < 0"
                        in_shape = [1, mw]
                        out_shape = [1, mh]
                        model.set_tensor_shape(mm_input, in_shape)
                        model.set_tensor_shape(mt_output, out_shape)
                        # create and insert new StreamingFCLayer node
                        new_node = helper.make_node(
                            "StreamingFCLayer_Batch",
                            [mm_input, mm_weight, mt_thres],
                            [mt_output],
                            domain="finn",
                            backend="fpgadataflow",
                            resType="ap_resource_lut()",
                            MW=mw,
                            MH=mh,
                            SIMD=simd,
                            PE=pe,
                            inputDataType=idt.name,
                            weightDataType=wdt.name,
                            outputDataType=odt.name,
                            ActVal=actval,
                            binaryXnorMode=0,
                            noActivation=0,
                        )
                        graph.node.insert(node_ind, new_node)
                        # remove old nodes
                        graph.node.remove(n)
                        graph.node.remove(consumer)
                        graph_modified = True
                    else:
                        # no activation, matmul only
                        in_shape = [1, mw]
                        out_shape = [1, mh]
                        odt = model.get_tensor_datatype(mm_output)
                        model.set_tensor_shape(mm_input, in_shape)
                        model.set_tensor_shape(mm_output, out_shape)
                        # create and insert new StreamingFCLayer node
                        new_node = helper.make_node(
                            "StreamingFCLayer_Batch",
                            [mm_input, mm_weight],
                            [mm_output],
                            domain="finn",
                            backend="fpgadataflow",
                            resType="ap_resource_lut()",
                            MW=mw,
                            MH=mh,
                            SIMD=simd,
                            PE=pe,
                            inputDataType=idt.name,
                            weightDataType=wdt.name,
                            outputDataType=odt.name,
                            ActVal=0,
                            binaryXnorMode=0,
                            noActivation=1,
                        )
                        graph.node.insert(node_ind, new_node)
                        # remove old node
                        graph.node.remove(n)
                        graph_modified = True
        return (model, graph_modified)
import copy

from onnx import helper

from finn.transformation import Transformation
from finn.util.basic import get_by_name, make_build_dir


class CreateDataflowPartition(Transformation):
    """Split a graph into two graphs; one which contains non-FINN-dataflow nodes
    and a StreamingDataflowPartition node, and another which only contains
    FINN dataflow nodes. The StreamingDataflowPartition has a model attribute
    that indicates the filename for the second graph that only contains
    dataflow nodes. No action is taken if there are no dataflow nodes."""

    def __init__(self):
        super().__init__()

    def apply(self, model):
        # TODO we currently assume that all dataflow nodes are connected to
        # each other, forming a single partition. check the assumption and/or
        # improve this.
        all_nodes = list(model.graph.node)
        # dataflow nodes = nodes with backend attribute set to "fpgadataflow"
        df_nodes = filter(
            lambda x: get_by_name(x.attribute, "backend") is not None, all_nodes
        )
        df_nodes = filter(
            lambda x: get_by_name(x.attribute, "backend").s.decode("UTF-8")
            == "fpgadataflow",
            df_nodes,
        )
        df_nodes = list(df_nodes)
        non_df_nodes = filter(lambda x: x not in df_nodes, all_nodes)
        non_df_nodes = list(non_df_nodes)

        if len(df_nodes) == 0:
            # no changes if no dataflow nodes are present
            return (model, False)
        else:
            # partition the model into two models
            df_model = copy.deepcopy(model)
            non_df_model = model
            # remove all non-dataflow nodes from the dataflow model
            for node_to_remove in non_df_nodes:
                df_model.graph.node.remove(node_to_remove)
            # identify the entry and exit points for the dataflow part
            # (assumes the dataflow nodes form one contiguous chain, see TODO)
            df_in = df_model.graph.node[0].input[0]
            df_out = df_model.graph.node[-1].output[0]
            df_in_vi = df_model.get_tensor_valueinfo(df_in)
            df_out_vi = df_model.get_tensor_valueinfo(df_out)
            # set df graph in/out to be df_in/df_out
            df_model.graph.input.remove(df_model.graph.input[0])
            df_model.graph.input.insert(0, df_in_vi)
            df_model.graph.output.remove(df_model.graph.output[0])
            df_model.graph.output.insert(0, df_out_vi)
            # save the dataflow-only model to its own build dir
            df_model_dir = make_build_dir("dataflow_partition_")
            df_model_filename = df_model_dir + "/df_model.onnx"
            df_model.save(df_model_filename)
            # remove all dataflow nodes from the non-dataflow model
            # keep track of where the dataflow part starts
            df_start_ind = all_nodes.index(df_nodes[0])
            for node_to_remove in df_nodes:
                non_df_model.graph.node.remove(node_to_remove)
            # create StreamingDataflow node with df_in/df_out io
            df_node = helper.make_node(
                "StreamingDataflowPartition",
                [df_in],
                [df_out],
                # use the model attribute to mark the df model
                model=df_model_filename,
            )
            # insert the placeholder node where the dataflow part used to start
            non_df_model.graph.node.insert(df_start_ind, df_node)

        return (non_df_model, False)
b/src/finn/transformation/fpgadataflow/hlssynth_ipgen.py new file mode 100644 index 0000000000000000000000000000000000000000..9fb7f8652d1fa5e624776a81ff6946d67882aa2a --- /dev/null +++ b/src/finn/transformation/fpgadataflow/hlssynth_ipgen.py @@ -0,0 +1,77 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import finn.custom_op.registry as registry
import finn.util.basic as util
from finn.transformation import Transformation


class HLSSynth_IPGen(Transformation):
    """For each node: generate IP block from code in folder
    that is referenced in node attribute "code_gen_dir_ipgen"
    and save path of generated project in node attribute "ipgen_path".
    All nodes in the graph must have the fpgadataflow backend attribute.

    This transformation calls Vivado HLS for synthesis, so it will run for
    some time (several minutes)"""

    def __init__(self):
        super().__init__()

    def apply(self, model):
        for node in model.graph.node:
            op_type = node.op_type
            if node.domain == "finn":
                backend_attribute = util.get_by_name(node.attribute, "backend")
                if backend_attribute is None:
                    continue
                backend_value = backend_attribute.s.decode("UTF-8")
                if backend_value == "fpgadataflow":
                    try:
                        # lookup op_type in registry of CustomOps
                        inst = registry.custom_op[op_type](node)
                    except KeyError:
                        # only the registry lookup signals an unsupported op;
                        # KeyErrors from synthesis itself must propagate
                        # unchanged instead of being misreported
                        raise Exception(
                            "Custom op_type %s is currently not supported." % op_type
                        )
                    # ensure that code is generated
                    assert (
                        inst.get_nodeattr("code_gen_dir_ipgen") != ""
                    ), """Node
                    attribute "code_gen_dir_ipgen" is empty. Please run
                    transformation CodeGen_ipgen first."""
                    # call the synthesis function for this node
                    inst.ipgen_singlenode_code()
                    # ensure that the project path is now set
                    assert (
                        inst.get_nodeattr("ipgen_path") != ""
                    ), """Transformation
                    HLSSynth_IPGen was not successful. Node attribute "ipgen_path"
                    is empty."""
        return (model, False)
from onnx import TensorProto
from onnx import helper as oh

from finn.custom_op.registry import getCustomOp
from finn.transformation import Transformation


class InsertTLastMarker(Transformation):
    """Ensure that the graph is terminated with a TLastMarker node, inserting
    one if necessary."""

    def __init__(self):
        super().__init__()

    def apply(self, model):
        # TODO only makes sense for a pure fpgadataflow graph -- check!
        graph_out_name = model.graph.output[0].name
        final_node = model.find_producer(graph_out_name)
        if final_node.op_type == "TLastMarker":
            # TODO maybe check the correctness of properties
            return (model, False)
        else:
            custom_op = getCustomOp(final_node)
            # number of stream words the marker must count before raising TLast
            num_iters = int(custom_op.get_number_output_values())
            stream_width = int(custom_op.get_outstream_width())
            out_shape = model.get_tensor_shape(graph_out_name)
            out_dtype = model.get_tensor_datatype(graph_out_name)
            elem_width = out_dtype.bitwidth()
            # make new buffer tensor between the final node and the marker;
            # container type is FLOAT (FINN convention), FINN dtype set below
            final_node_out = oh.make_tensor_value_info(
                model.make_new_valueinfo_name(), TensorProto.FLOAT, out_shape
            )
            model.graph.value_info.append(final_node_out)
            model.set_tensor_datatype(final_node_out.name, out_dtype)
            # reroute final node output to final_node_out_name
            final_node.output[0] = final_node_out.name
            # the marker consumes the rerouted tensor and drives the old
            # graph output, so the graph interface is unchanged
            tlast_node = oh.make_node(
                "TLastMarker",
                [final_node_out.name],
                [graph_out_name],
                NumIters=num_iters,
                StreamWidth=stream_width,
                ElemWidth=elem_width,
                domain="finn",
                backend="fpgadataflow",
            )
            model.graph.node.append(tlast_node)
            return (model, True)
class DeployToPYNQ(Transformation):
    """Collects all necessary files for deployment and copies them to the PYNQ board.

    Expects information about PYNQ board to make scp possible:

    IP address of board, username and password for board and target directory where
    the files are stored on the board.

    Raises an Exception if the PYNQ synthesis outputs (.bit/.hwh) cannot be
    found, instead of failing later with a NameError.
    """

    def __init__(self, ip, username, password, target_dir):
        super().__init__()
        self.ip = ip
        self.username = username
        self.password = password
        self.target_dir = target_dir

    def _run_bash(self, cmd):
        """Run *cmd* through /bin/bash and wait for it to complete."""
        bash_command = ["/bin/bash", "-c", cmd]
        process = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
        process.communicate()

    def apply(self, model):
        # set metadata properties according to user input specifications
        model.set_metadata_prop("pynq_ip", self.ip)
        model.set_metadata_prop("pynq_username", self.username)
        model.set_metadata_prop("pynq_password", self.password)
        model.set_metadata_prop("pynq_target_dir", self.target_dir)

        # create directory for deployment files
        deployment_dir = make_build_dir(prefix="pynq_deployment_")
        model.set_metadata_prop("pynq_deployment_dir", deployment_dir)

        # locate the .bit and .hwh produced by the PYNQ synthesis project
        vivado_pynq_proj = model.get_metadata_prop("vivado_pynq_proj")
        bitfile = None
        hwhfile = None
        for file in os.listdir(vivado_pynq_proj):
            if file.endswith(".bit"):
                bitfile = os.path.join(vivado_pynq_proj, file)
            elif file.endswith(".hwh"):
                hwhfile = os.path.join(vivado_pynq_proj, file)
        # fail early with a clear message instead of an unbound-name error
        if bitfile is None or hwhfile is None:
            raise Exception(
                "No .bit and/or .hwh file found in %s, apply SynthPYNQProject first."
                % vivado_pynq_proj
            )
        copy(bitfile, deployment_dir)
        copy(hwhfile, deployment_dir)

        # driver.py and python libraries
        pynq_driver_dir = model.get_metadata_prop("pynq_driver_dir")
        copy_tree(pynq_driver_dir, deployment_dir)
        # NOTE(review): both "pynq_deployment_dir" (above) and
        # "pynq_deploy_dir" are set to the same value; kept for
        # backward compatibility with existing consumers of either key.
        model.set_metadata_prop("pynq_deploy_dir", deployment_dir)
        model.set_metadata_prop("exec_mode", "remote_pynq")

        # create target directory on PYNQ board
        # SECURITY: sshpass exposes the password on the command line
        # (visible in process listings); consider key-based auth instead.
        cmd = 'sshpass -p {} ssh {}@{} "mkdir -p {}"'.format(
            self.password, self.username, self.ip, self.target_dir
        )
        self._run_bash(cmd)

        # copy directory to PYNQ board using scp and sshpass
        cmd = "sshpass -p {} scp -r {} {}@{}:{}".format(
            self.password, deployment_dir, self.username, self.ip, self.target_dir
        )
        self._run_bash(cmd)

        return (model, False)
class MakePYNQDriver(Transformation):
    """Generate the PYNQ Python driver that interfaces the built accelerator,
    including data packing/unpacking. Requires that MakePYNQProject has
    already been applied.

    Outcome if successful: sets the pynq_driver_dir attribute in the ONNX
    ModelProto's metadata_props field, with the created driver dir as the
    value.
    """

    def __init__(self):
        super().__init__()

    def apply(self, model):
        vivado_pynq_proj = model.get_metadata_prop("vivado_pynq_proj")
        if vivado_pynq_proj is None or (not os.path.isdir(vivado_pynq_proj)):
            raise Exception("No PYNQ project found, apply MakePYNQProject first.")

        # stage the generated driver in a fresh build folder
        pynq_driver_dir = make_build_dir(prefix="pynq_driver_")
        model.set_metadata_prop("pynq_driver_dir", pynq_driver_dir)

        # derive i/o shapes and datatypes from the graph
        # TODO convert this to an analysis pass
        i_name = model.graph.input[0].name
        o_name = model.graph.output[0].name
        i_dt = model.get_tensor_datatype(i_name)
        o_dt = model.get_tensor_datatype(o_name)
        # folded shapes come from the first/last HLSCustomOp instances
        first_node = getCustomOp(model.find_consumer(i_name))
        last_node = getCustomOp(model.find_producer(o_name))
        i_folded = first_node.get_folded_input_shape()
        o_folded = last_node.get_folded_output_shape()
        # packed shapes are obtained by packing dummy folded tensors
        i_packed = finnpy_to_packed_bytearray(
            gen_finn_dt_tensor(i_dt, i_folded), i_dt
        ).shape
        o_packed = finnpy_to_packed_bytearray(
            gen_finn_dt_tensor(o_dt, o_folded), o_dt
        ).shape

        # substitute all placeholders in the driver template
        substitutions = {
            "$INPUT_FINN_DATATYPE$": str(i_dt),
            "$INPUT_SHAPE_NORMAL$": str(tuple(model.get_tensor_shape(i_name))),
            "$INPUT_SHAPE_FOLDED$": str(i_folded),
            "$INPUT_SHAPE_PACKED$": str(i_packed),
            "$OUTPUT_FINN_DATATYPE$": str(o_dt),
            "$OUTPUT_SHAPE_NORMAL$": str(tuple(model.get_tensor_shape(o_name))),
            "$OUTPUT_SHAPE_FOLDED$": str(o_folded),
            "$OUTPUT_SHAPE_PACKED$": str(o_packed),
        }
        driver = templates.pynq_driver_template
        for placeholder, value in substitutions.items():
            driver = driver.replace(placeholder, value)

        with open(pynq_driver_dir + "/driver.py", "w") as f:
            f.write(driver)
        # copy the FINN dependencies the driver imports at runtime
        for pkg in ["util", "core"]:
            shutil.copytree(
                get_finn_root() + "/src/finn/" + pkg,
                pynq_driver_dir + "/finn/" + pkg,
            )

        return (model, False)
class MakePYNQProject(Transformation):
    """Create a Vivado PYNQ overlay project (including the shell infrastructure)
    from the already-stitched IP block for this graph.
    All nodes in the graph must have the fpgadataflow backend attribute,
    and the CodeGen_ipstitch transformation must have been previously run on
    the graph.

    Outcome if successful: sets the vivado_pynq_proj attribute in the ONNX
    ModelProto's metadata_props field, with the created project dir as the
    value.

    Raises an Exception when the PYNQ shell, the stitched IPI design, or its
    VLNV cannot be found.
    """

    def __init__(self, platform):
        super().__init__()
        # target PYNQ platform name, forwarded to the PYNQ shell Makefile
        self.platform = platform

    def apply(self, model):
        pynq_shell_path = os.environ["PYNQSHELL_PATH"]
        if not os.path.isdir(pynq_shell_path):
            raise Exception("Ensure the PYNQ-HelloWorld utility repo is cloned.")
        ipstitch_path = model.get_metadata_prop("vivado_stitch_proj")
        if ipstitch_path is None or (not os.path.isdir(ipstitch_path)):
            raise Exception(
                "No stitched IPI design found, apply CodeGen_ipstitch first."
            )
        vivado_stitch_vlnv = model.get_metadata_prop("vivado_stitch_vlnv")
        if vivado_stitch_vlnv is None:
            raise Exception(
                "No vlnv for stitched IP found, apply CodeGen_ipstitch first."
            )

        # collect list of all IP dirs ("list" is the tcl list keyword)
        ip_dirs = ["list"]
        for node in model.graph.node:
            ip_dir_attribute = get_by_name(node.attribute, "ipgen_path")
            assert (
                ip_dir_attribute is not None
            ), """Node attribute "ipgen_path" is
            empty. Please run transformation HLSSynth_ipgen first."""
            ip_dir_value = ip_dir_attribute.s.decode("UTF-8")
            ip_dir_value += "/sol1/impl/ip"
            assert os.path.isdir(
                ip_dir_value
            ), """The directory that should
            contain the generated ip blocks doesn't exist."""
            ip_dirs += [ip_dir_value]
        ip_dirs += [ipstitch_path + "/ip"]
        ip_dirs_str = "[%s]" % (" ".join(ip_dirs))

        # extract HLSCustomOp instances to get i/o stream widths
        i_tensor_name = model.graph.input[0].name
        o_tensor_name = model.graph.output[0].name
        first_node = getCustomOp(model.find_consumer(i_tensor_name))
        last_node = getCustomOp(model.find_producer(o_tensor_name))
        i_bits_per_cycle = first_node.get_instream_width()
        o_bits_per_cycle = last_node.get_outstream_width()
        # ensure i/o is padded to bytes
        i_bits_per_cycle_padded = roundup_to_integer_multiple(i_bits_per_cycle, 8)
        o_bits_per_cycle_padded = roundup_to_integer_multiple(o_bits_per_cycle, 8)
        assert (
            i_bits_per_cycle_padded % 8 == 0
        ), """Padded input bits are not a
        multiple of 8."""
        assert (
            o_bits_per_cycle_padded % 8 == 0
        ), """Padded output bits are not a
        multiple of 8."""
        # bugfix: use integer division so the byte widths substituted into
        # the tcl template are ints, not floats (was true division /, which
        # produced e.g. 1.0; %d formatting masked the wrong type)
        in_bytes = i_bits_per_cycle_padded // 8
        out_bytes = o_bits_per_cycle_padded // 8
        # interface/signal names as emitted by the IP stitcher
        in_if_name = "in0_V_V_0"
        out_if_name = "out_r_0"
        clk_name = "ap_clk_0"
        nrst_name = "ap_rst_n_0"
        vivado_ip_cache = os.getenv("VIVADO_IP_CACHE", default="")

        # create a temporary folder for the project
        vivado_pynq_proj_dir = make_build_dir(prefix="vivado_pynq_proj_")
        model.set_metadata_prop("vivado_pynq_proj", vivado_pynq_proj_dir)

        # fill in the tcl config consumed by the PYNQ shell flow;
        # argument order must match the %-placeholders in the template
        ip_config_tcl = templates.ip_config_tcl_template % (
            vivado_pynq_proj_dir,
            ip_dirs_str,
            vivado_pynq_proj_dir,
            vivado_stitch_vlnv,
            in_bytes,
            out_bytes,
            in_if_name,
            out_if_name,
            clk_name,
            nrst_name,
            vivado_ip_cache,
        )

        with open(vivado_pynq_proj_dir + "/ip_config.tcl", "w") as f:
            f.write(ip_config_tcl)
        # create shell scripts for project creation and synthesis
        make_project_sh = vivado_pynq_proj_dir + "/make_project.sh"
        working_dir = os.environ["PWD"]
        ipcfg = vivado_pynq_proj_dir + "/ip_config.tcl"
        with open(make_project_sh, "w") as f:
            f.write(
                templates.call_pynqshell_makefile_template
                % (pynq_shell_path, self.platform, ipcfg, "block_design", working_dir)
            )
        synth_project_sh = vivado_pynq_proj_dir + "/synth_project.sh"
        with open(synth_project_sh, "w") as f:
            f.write(
                templates.call_pynqshell_makefile_template
                % (pynq_shell_path, self.platform, ipcfg, "bitstream", working_dir)
            )
        # call the project creation script
        # synthesis script will be called with a separate transformation
        bash_command = ["bash", make_project_sh]
        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
        process_compile.communicate()
        return (model, False)
class ReplaceVerilogRelPaths(Transformation):
    """Convert ./ relative file paths to absolute ones for generated Verilog"""

    def __init__(self):
        super().__init__()

    def apply(self, model):
        for node in model.graph.node:
            # only fpgadataflow custom ops carry generated Verilog
            if node.domain != "finn":
                continue
            backend_attr = util.get_by_name(node.attribute, "backend")
            if backend_attr is None:
                continue
            if backend_attr.s.decode("UTF-8") != "fpgadataflow":
                continue
            try:
                # lookup op_type in registry of CustomOps
                inst = registry.custom_op[node.op_type](node)
                # find the IP gen dir
                ipgen_path = inst.get_nodeattr("ipgen_path")
                if ipgen_path is None or not os.path.isdir(ipgen_path):
                    continue
                for dname, _, files in os.walk(ipgen_path):
                    for fname in files:
                        if not fname.endswith(".v"):
                            continue
                        fpath = os.path.join(dname, fname)
                        with open(fpath, "r") as f:
                            contents = f.read()
                        # rewrite $readmemh("./...") to an absolute path
                        contents = contents.replace(
                            '$readmemh(".', '$readmemh("%s' % dname
                        )
                        with open(fpath, "w") as f:
                            f.write(contents)
            except KeyError:
                # op_type not registered as a CustomOp: nothing to patch
                pass
        return (model, False)
class SetExecMode(Transformation):
    """Set attribute exec_mode in all fpgadataflow nodes to specify which
    kind of execution should be used ("npysim" or "rtlsim")"""

    def __init__(self, mode):
        super().__init__()
        # execution mode written into every fpgadataflow node
        self.mode = mode

    def apply(self, model):
        for node in model.graph.node:
            op_type = node.op_type
            # only finn-domain nodes with the fpgadataflow backend qualify
            if node.domain != "finn":
                continue
            backend_attr = util.get_by_name(node.attribute, "backend")
            if backend_attr is None:
                continue
            if backend_attr.s.decode("UTF-8") != "fpgadataflow":
                continue
            try:
                # lookup op_type in registry of CustomOps
                inst = registry.custom_op[op_type](node)
                # set the execution mode as requested
                inst.set_nodeattr("exec_mode", self.mode)
                # ensure that exec_mode is now set
                assert (
                    inst.get_nodeattr("exec_mode") != ""
                ), """Transformation
                was not successful. Node attribute "exec_mode" is not set"""
            except KeyError:
                # exception if op_type is not supported
                raise Exception(
                    "Custom op_type %s is currently not supported." % op_type
                )
        return (model, False)
class SynthPYNQProject(Transformation):
    """Run synthesis for the PYNQ project for this graph. The MakePYNQProject
    transformation must be applied prior to this transformation."""

    def __init__(self):
        super().__init__()

    def apply(self, model):
        proj_dir = model.get_metadata_prop("vivado_pynq_proj")
        if proj_dir is None or (not os.path.isdir(proj_dir)):
            raise Exception("No synthesis project, apply MakePYNQProject first.")
        synth_script = proj_dir + "/synth_project.sh"
        if not os.path.isfile(synth_script):
            raise Exception("No synthesis script, apply MakePYNQProject first.")
        # run the synthesis script and block until it completes
        proc = subprocess.Popen(["bash", synth_script], stdout=subprocess.PIPE)
        proc.communicate()
        # record where the produced bitfile lives
        model.set_metadata_prop("vivado_pynq_bitfile", proj_dir + "/resizer.bit")
        # TODO pull out synthesis statistics and put them in as attributes
        return (model, False)
# template for the PYNQ shell integration configuration tcl script;
# consumed by MakePYNQProject, which fills the %-placeholders in order:
# project dir, IP repo list, output dir, VLNV, bytes in/out, AXIS in/out
# interface names, clock name, reset name, Vivado OOC IP cache path
ip_config_tcl_template = """
variable config_ip_repo
variable config_ip_vlnv
variable config_ip_bytes_in
variable config_ip_bytes_out
variable config_ip_axis_name_in
variable config_ip_axis_name_out
variable config_ip_use_axilite
variable config_ip_project_dir
variable config_output_products_dir
variable config_remote_cache

# for arguments involving paths below: use absolute paths or relative to the
# platform/overlay/bitstream folder
# where to create the project
set config_ip_project_dir %s
# IP repositories that the project depends on
set config_ip_repo %s
# where the produced bitfile and .hwh file will be placed
set config_output_products_dir %s

# non-path arguments
# VLNV of the IP block
set config_ip_vlnv %s
# width of the AXI stream into the IP, in bytes
set config_ip_bytes_in %d
# width of the AXI stream out of the IP, in bytes
set config_ip_bytes_out %d
# the name of the input AXI stream interface
set config_ip_axis_name_in %s
# the name of the output AXI stream interface
set config_ip_axis_name_out %s
# the name of the clock signal
set config_ip_clk_name %s
# the name of the active-low reset signal
set config_ip_nrst_name %s
# whether the IP needs an AXI Lite interface for control
set config_ip_use_axilite 0
# Vivado OOC IP cache
set config_remote_cache "%s"
"""

# template for a shell script that drives the PYNQ shell Makefile;
# placeholders: shell repo dir, platform, ip_config.tcl path, make target,
# dir to return to. NOTE(review): scripts built from this are invoked via
# an explicit "bash <script>", so the shebang not being on line 1 (the
# template starts with a newline) is harmless here.
call_pynqshell_makefile_template = """
#!/bin/bash
cd %s
export platform=%s
export ip_config=%s
make %s
cd %s
"""

# template for the on-board PYNQ Python driver; the $...$ placeholders are
# substituted by MakePYNQDriver with the accelerator's datatypes and its
# normal/folded/packed i/o shapes
pynq_driver_template = """
from pynq import Overlay
import numpy as np
from pynq import allocate
from finn.util.data_packing import (
    finnpy_to_packed_bytearray,
    packed_bytearray_to_finnpy
)
from finn.core.datatype import DataType

bitfile_path = "resizer.bit"
ol = Overlay(bitfile_path)
dma=ol.axi_dma_0

# declare input/output types and shapes for the accelerator
# input FINN DataType
idt = $INPUT_FINN_DATATYPE$
# normal, folded and packed input shapes
ishape_normal = $INPUT_SHAPE_NORMAL$
ishape_folded = $INPUT_SHAPE_FOLDED$
ishape_packed = $INPUT_SHAPE_PACKED$
# output FINN DataType
odt = $OUTPUT_FINN_DATATYPE$
# normal, folded and packed output shapes
oshape_normal = $OUTPUT_SHAPE_NORMAL$
oshape_folded = $OUTPUT_SHAPE_FOLDED$
oshape_packed = $OUTPUT_SHAPE_PACKED$

# load desired input .npy file
ibuf_normal = np.load("input.npy")
# ensure that shape is as expected
assert ibuf_normal.shape == ishape_normal
# convert to folded form
ibuf_folded = ibuf_normal.reshape(ishape_folded)

# pack the input buffer, reversing both SIMD dim and endianness
ibuf_packed = finnpy_to_packed_bytearray(
    ibuf_folded, idt, reverse_endian=True, reverse_inner=True
)
# allocate a PYNQ buffer for the packed input buffer
ibuf_packed_device = allocate(shape=ishape_packed, dtype=np.uint8)
# copy the packed data into the PYNQ buffer
# TODO optimization: pack directly into the PYNQ buffer?
np.copyto(ibuf_packed_device, ibuf_packed)

# allocate a PYNQ buffer for the returned packed output buffer
obuf_packed = allocate(shape=oshape_packed, dtype=np.uint8)

# set up the DMA and wait until all transfers complete
dma.sendchannel.transfer(ibuf_packed_device)
dma.recvchannel.transfer(obuf_packed)
dma.sendchannel.wait()
dma.recvchannel.wait()

# unpack the packed output buffer from accelerator
obuf_folded = packed_bytearray_to_finnpy(
    obuf_packed, odt, oshape_folded, reverse_endian=True, reverse_inner=True
)
# convert to normal reshape and save
obuf_normal = obuf_folded.reshape(oshape_normal)
np.save("output.npy", obuf_normal)
"""
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import finn.util.basic as util from finn.transformation import Transformation diff --git a/src/finn/transformation/infer_datatypes.py b/src/finn/transformation/infer_datatypes.py index eaade9ebb7f09e39442e9cb8baba7c257720b45b..4c4620da472c5d34985be4054c36099bcc6c811d 100644 --- a/src/finn/transformation/infer_datatypes.py +++ b/src/finn/transformation/infer_datatypes.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import finn.custom_op.registry as registry from finn.core.datatype import DataType from finn.transformation import Transformation diff --git a/src/finn/transformation/infer_shapes.py b/src/finn/transformation/infer_shapes.py index 2bddc44673bcd09305401a9d50033141795b00c9..74a3e62e39ea9e60139b9385327c493ffaa25880 100644 --- a/src/finn/transformation/infer_shapes.py +++ b/src/finn/transformation/infer_shapes.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import onnx.shape_inference as si import finn.custom_op.registry as registry @@ -8,7 +36,7 @@ from finn.transformation import Transformation def _make_shape_compatible_op(node): """Return a shape-compatible non-FINN op for a given FINN op. Used for shape inference with custom ops.""" - assert node.domain == "finn" + assert node.domain == "finn", 'Node domain is not set to "finn".' op_type = node.op_type try: # lookup op_type in registry of CustomOps diff --git a/src/finn/transformation/streamline/__init__.py b/src/finn/transformation/streamline/__init__.py index 09065e740debb5c458cb1de263c5648df3046c41..b5a5bd2f65b41fdb1d0e1c048949c206adfa357b 100644 --- a/src/finn/transformation/streamline/__init__.py +++ b/src/finn/transformation/streamline/__init__.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from finn.transformation import Transformation from finn.transformation.infer_datatypes import InferDataTypes from finn.transformation.general import ( @@ -37,6 +65,7 @@ class Streamline(Transformation): ConvertSubToAdd(), BatchNormToAffine(), ConvertSignToThres(), + MoveAddPastMul(), MoveScalarAddPastMatMul(), MoveScalarMulPastMatMul(), MoveAddPastMul(), diff --git a/src/finn/transformation/streamline/absorb.py b/src/finn/transformation/streamline/absorb.py index 6806137e068a864fb2280638c7415881a6bf91c5..eb5845f198bc636e85d395b7bdf32e01b0222cf2 100644 --- a/src/finn/transformation/streamline/absorb.py +++ b/src/finn/transformation/streamline/absorb.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import helper as oh @@ -22,8 +50,8 @@ class AbsorbAddIntoMultiThreshold(Transformation): threshold_name = consumer.input[1] A = model.get_initializer(add_weight_name) T = model.get_initializer(threshold_name) - assert A is not None - assert T is not None + assert A is not None, "Initializer for add weights is not set." + assert T is not None, "Initializer for thresholds is not set." start_name = n.input[0] # we can only absorb 0d or 1d adds is_scalar = A.ndim == 0 or all(x == 1 for x in A.shape) @@ -54,7 +82,7 @@ class AbsorbMulIntoMultiThreshold(Transformation): if n.op_type == "Mul": mul_weight_name = n.input[1] A = model.get_initializer(mul_weight_name) - assert A is not None + assert A is not None, "Initializer for mul weights is not set." 
is_signed = (A < 0).any() is_scalar = A.ndim == 0 or all(x == 1 for x in A.shape) is_1d = A.ndim > 0 and np.prod(A.shape) == A.shape[-1] @@ -63,7 +91,7 @@ class AbsorbMulIntoMultiThreshold(Transformation): if not is_signed and (is_1d or is_scalar): threshold_name = consumer.input[1] T = model.get_initializer(threshold_name) - assert T is not None + assert T is not None, "Initializer for thresholds is not set." start_name = n.input[0] # compute new thresholds and set initializer Tnew = T / A.reshape(-1, 1) @@ -92,7 +120,7 @@ class FactorOutMulSignMagnitude(Transformation): if n.op_type == "Mul": mul_weight_name = n.input[1] A = model.get_initializer(mul_weight_name) - assert A is not None + assert A is not None, "Initializer for mul weights is not set." is_scalar = np.prod(A.shape) == 1 is_1d = len(A.shape) == 2 and A.shape[0] == 1 is_not_bipolar = ( @@ -133,16 +161,19 @@ class Absorb1BitMulIntoMatMul(Transformation): if n.op_type == "MatMul": matmul_weight_name = n.input[1] W = model.get_initializer(matmul_weight_name) - assert W is not None + assert W is not None, "Initializer for matmul weights is not set." consumer = model.find_consumer(n.output[0]) if consumer is not None and consumer.op_type == "Mul": mul_weight_name = consumer.input[1] A = model.get_initializer(mul_weight_name) - assert A is not None + assert A is not None, "Initializer for mul weights is not set." 
is_1bit = model.get_tensor_datatype(mul_weight_name).bitwidth() == 1 if is_1bit: Wnew = A * W - assert Wnew.shape == W.shape + assert ( + Wnew.shape == W.shape + ), """Shape of new weights is not + the same as the shape of the weight matrix before.""" model.set_initializer(matmul_weight_name, Wnew) n.output[0] = consumer.output[0] graph.node.remove(consumer) diff --git a/src/finn/transformation/streamline/collapse_repeated.py b/src/finn/transformation/streamline/collapse_repeated.py index 71a1c554eeadd31e12d790b52bf121a0344352a1..aa059747b602bc6b659bc8b53b1f18988bba1ef0 100644 --- a/src/finn/transformation/streamline/collapse_repeated.py +++ b/src/finn/transformation/streamline/collapse_repeated.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from onnx import helper as oh from finn.transformation import Transformation @@ -27,8 +55,14 @@ class CollapseRepeatedOp(Transformation): op1_param_name = consumer.input[1] op0_param = model.get_initializer(op0_param_name) op1_param = model.get_initializer(op1_param_name) - assert op0_param is not None - assert op1_param is not None + assert ( + op0_param is not None + ), """Initializer for parameters for + op0 is not set.""" + assert ( + op1_param is not None + ), """Initializer for parameters for + op1 is not set.""" start_name = n.input[0] end_name = consumer.output[0] # compute the new parameter @@ -50,10 +84,14 @@ class CollapseRepeatedOp(Transformation): class CollapseRepeatedAdd(CollapseRepeatedOp): + """Collapse repeated adder node into a single operation.""" + def __init__(self): super().__init__("Add", lambda x, y: y + x) class CollapseRepeatedMul(CollapseRepeatedOp): + """Collapse repeated multiplier node into a single operation.""" + def __init__(self): super().__init__("Mul", lambda x, y: y * x) diff --git a/src/finn/transformation/streamline/reorder.py b/src/finn/transformation/streamline/reorder.py index 1af7f7f7852975667008dd8548ee0dd613f0bc71..db55dc2021a0c9ef8330270aa9eb950dcf66c575 100644 --- a/src/finn/transformation/streamline/reorder.py +++ b/src/finn/transformation/streamline/reorder.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import helper as oh @@ -26,8 +54,8 @@ class MoveAddPastMul(Transformation): add_weight_name = n.input[1] A = model.get_initializer(mul_weight_name) B = model.get_initializer(add_weight_name) - assert A is not None - assert B is not None + assert A is not None, "Initializer for mul weights is not set." + assert B is not None, "Initializer for add weights is not set." 
start_name = n.input[0] middle_name = n.output[0] end_name = consumer.output[0] @@ -69,8 +97,8 @@ class MoveScalarMulPastMatMul(Transformation): matmul_weight_name = consumer.input[1] A = model.get_initializer(mul_weight_name) W = model.get_initializer(matmul_weight_name) - assert A is not None - assert W is not None + assert A is not None, "Initializer for mul weights is not set." + assert W is not None, "Initializer for matmul weights is not set." start_name = n.input[0] middle_name = n.output[0] end_name = consumer.output[0] @@ -112,8 +140,8 @@ class MoveScalarAddPastMatMul(Transformation): matmul_weight_name = consumer.input[1] A = model.get_initializer(add_weight_name) W = model.get_initializer(matmul_weight_name) - assert A is not None - assert W is not None + assert A is not None, "Initializer for add weights is not set." + assert W is not None, "Initializer for matmul weights is not set." start_name = n.input[0] middle_name = n.output[0] end_name = consumer.output[0] diff --git a/src/finn/transformation/streamline/round_thresholds.py b/src/finn/transformation/streamline/round_thresholds.py index 94bcf2e8acceaf38963413051b51ef67ad7faf95..c33281d85449c173a4631297fd1d67ac0aed8c81 100644 --- a/src/finn/transformation/streamline/round_thresholds.py +++ b/src/finn/transformation/streamline/round_thresholds.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from finn.transformation import Transformation diff --git a/src/finn/transformation/streamline/sign_to_thres.py b/src/finn/transformation/streamline/sign_to_thres.py index f1c8b63fbb95204891b59f5580dd1353b0bc20d2..d2b51df7a43c830516897e6bf7d2210698269da8 100644 --- a/src/finn/transformation/streamline/sign_to_thres.py +++ b/src/finn/transformation/streamline/sign_to_thres.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import helper as oh @@ -19,7 +47,10 @@ class ConvertSignToThres(Transformation): sign_out_name = n.output[0] # find consumer consumer = model.find_consumer(sign_out_name) - assert consumer is not None + assert ( + consumer is not None + ), """There is no consumer of the + sign_out tensor.""" # create thresholds thres_param_name = model.make_new_valueinfo_name() thres_param = np.asarray([[0]], dtype=np.float32) diff --git a/src/finn/util/__init__.py b/src/finn/util/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..83c8e8bed70797f7d6c0138968f750f72e790386 --- /dev/null +++ b/src/finn/util/__init__.py @@ -0,0 +1,27 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/src/finn/core/utils.py b/src/finn/util/basic.py similarity index 58% rename from src/finn/core/utils.py rename to src/finn/util/basic.py index 0ea2392d903a7ea570b6972a33fab3945a051b43..ecc7cb192177cb2bb57a8d2efdfea91f22a488a1 100644 --- a/src/finn/core/utils.py +++ b/src/finn/util/basic.py @@ -1,21 +1,73 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import os import random import string import subprocess +import tempfile import numpy as np -import onnx -from bitstring import BitArray from finn.core.datatype import DataType +# mapping from PYNQ board names to FPGA part names +pynq_part_map = dict() +pynq_part_map["Ultra96"] = "xczu3eg-sbva484-1-e" +pynq_part_map["Pynq-Z1"] = "xc7z020clg400-1" -def valueinfo_to_tensor(vi): - """Creates an all-zeroes numpy tensor from a ValueInfoProto.""" - dims = [x.dim_value for x in vi.type.tensor_type.shape.dim] - return np.zeros( - dims, dtype=onnx.mapping.TENSOR_TYPE_TO_NP_TYPE[vi.type.tensor_type.elem_type] - ) +def get_finn_root(): + "Return the root directory that FINN is cloned into." + + try: + return os.environ["FINN_ROOT"] + except KeyError: + raise Exception( + """Environment variable FINN_ROOT must be set + correctly. Please ensure you have launched the Docker container correctly. + """ + ) + + +def make_build_dir(prefix=""): + """Creates a temporary folder with given prefix to be used as a build dir. + Use this function instead of tempfile.mkdtemp to ensure any generated files + will survive on the host after the FINN Docker container exits.""" + try: + inst_prefix = os.environ["FINN_INST_NAME"] + "/" + return tempfile.mkdtemp(prefix=inst_prefix + prefix) + except KeyError: + raise Exception( + """Environment variable FINN_INST_NAME must be set + correctly. Please ensure you have launched the Docker container correctly. 
+ """ + ) def get_by_name(container, name, name_field="name"): @@ -29,7 +81,7 @@ def get_by_name(container, name, name_field="name"): def remove_by_name(container, name, name_field="name"): - """Remove item from container by .name field if it exists""" + """Remove item from container by .name field if it exists.""" item = get_by_name(container, name, name_field) if item is not None: container.remove(item) @@ -41,85 +93,24 @@ def random_string(stringLength=6): return "".join(random.choice(lettersAndDigits) for i in range(stringLength)) -def array2hexstring(array, dtype, pad_to_nbits, prefix="0x"): - """ - Pack given one-dimensional NumPy array with FINN DataType dtype into a hex - string. - Any BIPOLAR values will be converted to a single bit with a 0 representing - -1. - pad_to_nbits is used to prepend leading zeros to ensure packed strings of - fixed width. The minimum value for pad_to_nbits is 4, since a single hex - digit is four bits. - - Examples: - array2hexstring([1, 1, 1, 0], DataType.BINARY, 4) = "e" - array2hexstring([1, 1, 1, 0], DataType.BINARY, 8) = "0e" - """ - if pad_to_nbits < 4: - pad_to_nbits = 4 - # ensure input is a numpy array with float values - if type(array) != np.ndarray or array.dtype != np.float32: - # try to convert to a float numpy array (container dtype is float) - array = np.asarray(array, dtype=np.float32) - # ensure one-dimensional array to pack - assert array.ndim == 1 - if dtype == DataType.BIPOLAR: - # convert bipolar values to binary - array = (array + 1) / 2 - dtype = DataType.BINARY - lineval = BitArray(length=0) - bw = dtype.bitwidth() - for val in array: - # ensure that this value is permitted by chosen dtype - assert dtype.allowed(val) - if dtype.is_integer(): - if dtype.signed(): - lineval.append(BitArray(int=int(val), length=bw)) - else: - lineval.append(BitArray(uint=int(val), length=bw)) - else: - lineval.append(BitArray(float=val, length=bw)) - if pad_to_nbits >= lineval.len: - # extend to the desired output width (a 
minimum of 4 bits) - lineval.prepend(BitArray(length=pad_to_nbits - lineval.len)) - else: - raise Exception("Number of bits is greater than pad_to_nbits") - # represent as hex - return prefix + lineval.hex - - -def pack_innermost_dim_as_hex_string(ndarray, dtype, pad_to_nbits): - """Pack the innermost dimension of the given numpy ndarray into hex - strings using array2hexstring. Examples: - - A = [[1, 1, 1, 0], [0, 1, 1, 0]] - eA = ["0e", "06"] - pack_innermost_dim_as_hex_string(A, DataType.BINARY, 8) == eA - B = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] - eB = [[ "0f", "0f"], ["07", "0d"]] - pack_innermost_dim_as_hex_string(B, DataType.UINT2, 8) == eB - """ - - if type(ndarray) != np.ndarray or ndarray.dtype != np.float32: - # try to convert to a float numpy array (container dtype is float) - ndarray = np.asarray(ndarray, dtype=np.float32) - - def fun(x): - return array2hexstring(x, dtype, pad_to_nbits) - - return np.apply_along_axis(fun, ndarray.ndim - 1, ndarray) - - def interleave_matrix_outer_dim_from_partitions(matrix, n_partitions): + """Interleave the outermost dimension of a matrix from given + partitions (n_partitions).""" if type(matrix) != np.ndarray or matrix.dtype != np.float32: # try to convert to a float numpy array (container dtype is float) matrix = np.asarray(matrix, dtype=np.float32) shp = matrix.shape ndim = matrix.ndim # ensure # partitions evenly divide the outermost dimension - assert shp[0] % n_partitions == 0 + assert ( + shp[0] % n_partitions == 0 + ), """The outermost dimension is not divisible + by the number of partitions.""" # only tested for matrices - assert ndim == 2 + assert ( + ndim == 2 + ), """The dimension of the matrix is not 2. 
Currently this function + only works for matrices.""" # interleave rows between PEs using reshape + transpose matrix_r = matrix.reshape(-1, n_partitions, shp[1]).transpose((1, 0, 2)) matrix_r = matrix_r.reshape(n_partitions, -1, shp[1]) @@ -131,13 +122,13 @@ def roundup_to_integer_multiple(x, factor): Returns x if factor is set to -1. Both x and factor must otherwise be positive.""" # ensure integers - assert int(x) == x - assert int(factor) == factor + assert int(x) == x, "The input x is not an integer." + assert int(factor) == factor, "The input factor is not an integer." # use -1 to indicate no padding needed if factor == -1: return x # ensure positive values - assert factor > 0 and x > 0 + assert factor > 0 and x > 0, "Factor and x are <= 0." if x < factor: return factor else: @@ -157,7 +148,10 @@ def pad_tensor_to_multiple_of(ndarray, pad_to_dims, val=0, distr_pad=False): if type(ndarray) != np.ndarray or ndarray.dtype != np.float32: # try to convert to a float numpy array (container dtype is float) ndarray = np.asarray(ndarray, dtype=np.float32) - assert ndarray.ndim == len(pad_to_dims) + assert ndarray.ndim == len( + pad_to_dims + ), """The dimensions of the input + array don't match the length of the pad_to_dims value.""" # compute the desired shape desired = zip(list(ndarray.shape), list(pad_to_dims)) desired = map(lambda x: roundup_to_integer_multiple(x[0], x[1]), desired) @@ -173,12 +167,17 @@ def pad_tensor_to_multiple_of(ndarray, pad_to_dims, val=0, distr_pad=False): # all padding is added after the existing values pad_amt = list(map(lambda x: (0, x), pad_amt)) ret = np.pad(ndarray, pad_amt, mode="constant", constant_values=val) - assert (np.asarray(ret.shape, dtype=np.int32) == desired).all() + assert ( + np.asarray(ret.shape, dtype=np.int32) == desired + ).all(), """The + calculated output array doesn't match the desired/expected one.""" return ret def gen_finn_dt_tensor(finn_dt, tensor_shape): - """Generates random tensor in given shape and with 
given FINN DataType""" + """Generates random tensor in given shape and with given FINN DataType.""" + if type(tensor_shape) == list: + tensor_shape = tuple(tensor_shape) if finn_dt == DataType.BIPOLAR: tensor_values = np.random.randint(2, size=tensor_shape) tensor_values = 2 * tensor_values - 1 @@ -199,9 +198,12 @@ def calculate_signed_dot_prod_range(dt_a, dt_b, len): """Returns the (min,max) values a dot product between two signed vectors of types dt_a and dt_b of len elements can take.""" - assert dt_a.signed() and dt_b.signed() + assert ( + dt_a.signed() and dt_b.signed() + ), """The input values are not both + signed vectors.""" min_prod = 2 ** 30 - max_prod = -2 ** 30 + max_prod = -(2 ** 30) for a_val in [dt_a.min(), dt_a.max()]: for b_val in [dt_b.min(), dt_b.max()]: prod = a_val * b_val * len @@ -213,6 +215,9 @@ class CppBuilder: + """Builds the g++ compiler command to produce the executable of the c++ code + in code_gen_dir which is passed to the function build() of this class.""" + def __init__(self): self.include_paths = [] self.cpp_files = [] @@ -222,15 +227,21 @@ class CppBuilder: def append_includes(self, library_path): + """Adds given library path to include_paths list.""" self.include_paths.append(library_path) def append_sources(self, cpp_file): + """Adds given c++ file to cpp_files list.""" self.cpp_files.append(cpp_file) def set_executable_path(self, path): + """Sets member variable "executable_path" to given path.""" self.executable_path = path def build(self, code_gen_dir): + """Builds the g++ compiler command according to entries in include_paths + and cpp_files lists. 
Saves it in bash script in given folder and + executes it.""" # raise error if includes are empty self.code_gen_dir = code_gen_dir self.compile_components.append("g++ -o " + str(self.executable_path)) diff --git a/src/finn/util/data_packing.py b/src/finn/util/data_packing.py new file mode 100644 index 0000000000000000000000000000000000000000..ae98d312e7c5923a572f918430aececf29f3e094 --- /dev/null +++ b/src/finn/util/data_packing.py @@ -0,0 +1,387 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import binascii +import os +import sys + +import numpy as np +from bitstring import BitArray + +from finn.core.datatype import DataType +from finn.util.basic import roundup_to_integer_multiple + + +def array2hexstring(array, dtype, pad_to_nbits, prefix="0x", reverse=False): + """ + Pack given one-dimensional NumPy array with FINN DataType dtype into a hex + string. + Any BIPOLAR values will be converted to a single bit with a 0 representing + -1. + pad_to_nbits is used to prepend leading zeros to ensure packed strings of + fixed width. The minimum value for pad_to_nbits is 4, since a single hex + digit is four bits. reverse can be used to reverse the array prior to + packing. + + Examples: + + array2hexstring([1, 1, 1, 0], DataType.BINARY, 4) = "0xe" + + array2hexstring([1, 1, 1, 0], DataType.BINARY, 8) = "0x0e" + + array2hexstring([1, 1, 0, 1], DataType.BINARY, 4, reverse=True) = "0xb" + + array2hexstring([1, 1, 1, 0], DataType.BINARY, 8, reverse=True) = "0x07" + """ + if pad_to_nbits < 4: + pad_to_nbits = 4 + # ensure input is a numpy array with float values + if type(array) != np.ndarray or array.dtype != np.float32: + # try to convert to a float numpy array (container dtype is float) + array = np.asarray(array, dtype=np.float32) + # ensure one-dimensional array to pack + assert array.ndim == 1, "The given array is not one-dimensional." + if dtype == DataType.BIPOLAR: + # convert bipolar values to binary + array = (array + 1) / 2 + dtype = DataType.BINARY + # reverse prior to packing, if desired + if reverse: + array = np.flip(array, -1) + lineval = BitArray(length=0) + bw = dtype.bitwidth() + for val in array: + # ensure that this value is permitted by chosen dtype + assert dtype.allowed(val), "This value is not permitted by chosen dtype." 
+ if dtype.is_integer(): + if dtype.signed(): + lineval.append(BitArray(int=int(val), length=bw)) + else: + lineval.append(BitArray(uint=int(val), length=bw)) + else: + lineval.append(BitArray(float=val, length=bw)) + if pad_to_nbits >= lineval.len: + # extend to the desired output width (a minimum of 4 bits) + lineval.prepend(BitArray(length=pad_to_nbits - lineval.len)) + else: + raise Exception("Number of bits is greater than pad_to_nbits") + # represent as hex + return prefix + lineval.hex + + +def hexstring2npbytearray(hexstring, remove_prefix="0x"): + """Convert a hex string into a NumPy array of dtype uint8. + + Example: + + hexstring2npbytearray("0f01") = array([15, 1], dtype=uint8) + """ + # remove prefix if found + if hexstring.startswith(remove_prefix): + lrp = len(remove_prefix) + hexstring = hexstring[lrp:] + # use Python's built-in bytearray + return np.asarray(bytearray.fromhex(hexstring), dtype=np.uint8) + + +def npbytearray2hexstring(npbytearray, prefix="0x"): + """Convert a NumPy array of uint8 dtype into a hex string. + + Example: + + npbytearray2hexstring(array([15, 1], dtype=uint8)) = "0x0f01" + """ + return prefix + binascii.hexlify(bytearray(npbytearray)).decode("utf-8") + + +def pack_innermost_dim_as_hex_string(ndarray, dtype, pad_to_nbits, reverse_inner=False): + """Pack the innermost dimension of the given numpy ndarray into hex + strings using array2hexstring. 
+ + Examples: + + A = [[1, 1, 1, 0], [0, 1, 1, 0]] + + eA = ["0e", "06"] + + pack_innermost_dim_as_hex_string(A, DataType.BINARY, 8) == eA + + B = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] + + eB = [[ "0f", "0f"], ["07", "0d"]] + + pack_innermost_dim_as_hex_string(B, DataType.UINT2, 8) == eB + """ + + if type(ndarray) != np.ndarray or ndarray.dtype != np.float32: + # try to convert to a float numpy array (container dtype is float) + ndarray = np.asarray(ndarray, dtype=np.float32) + + def fun(x): + return array2hexstring(x, dtype, pad_to_nbits, reverse=reverse_inner) + + return np.apply_along_axis(fun, ndarray.ndim - 1, ndarray) + + +def unpack_innermost_dim_from_hex_string( + ndarray, dtype, out_shape, packedBits, reverse_inner=False +): + """Convert a NumPy array of hex strings into a FINN NumPy array by unpacking + the hex strings into the specified data type. out_shape can be specified + such that any padding in the packing dimension is removed. If reverse_inner + is set, the innermost unpacked dimension will be reversed.""" + + if type(ndarray) != np.ndarray: + raise Exception( + """unpack_innermost_dim_from_hex_string needs ndarray + as input""" + ) + if ndarray.dtype.kind not in {"U", "S"}: + raise Exception( + """unpack_innermost_dim_from_hex_string needs ndarray of + hex strings as input""" + ) + # convert ndarray into flattened list + data = ndarray.flatten().tolist() + targetBits = dtype.bitwidth() + # calculate outer and inner dim shapes + outer_dim_elems = 1 + for dim in range(len(out_shape) - 1): + outer_dim_elems = outer_dim_elems * out_shape[dim] + inner_dim_elems = out_shape[-1] + + array = [] + for outer_elem in range(outer_dim_elems): + ar_list = [] + ar_elem = data[0] + data.pop(0) + ar_elem = ar_elem.split("x") + ar_elem_bin = bin(int(ar_elem[1], 16))[2:].zfill(packedBits) + ar_elem_bin = [int(x) for x in ar_elem_bin] + + ar_elem_bin.reverse() + for i in range(inner_dim_elems): + upper_limit = (i + 1) * targetBits + lower_limit = i * targetBits + 
elem = ar_elem_bin[lower_limit:upper_limit] + elem.reverse() + elem_str = "".join(map(str, elem)) + ar_list.append(int(elem_str, 2)) + # reverse inner dimension back to "normal" positions + if reverse_inner is False: + ar_list.reverse() + + # interpret output values correctly + + # interpret values as bipolar + if dtype == DataType.BIPOLAR: + ar_list = [2 * x - 1 for x in ar_list] + # interpret values as signed values + elif dtype.name.startswith("INT"): + mask = 2 ** (dtype.bitwidth() - 1) + ar_list = [-(x & mask) + (x & ~mask) for x in ar_list] + + array.append(ar_list) + array = np.asarray(array, dtype=np.float32).reshape(out_shape) + return array + + +def numpy_to_hls_code( + ndarray, dtype, hls_var_name, pack_innermost_dim=True, no_decl=False +): + """Return C++ code representation of a numpy ndarray with FINN DataType + dtype, using hls_var_name as the resulting C++ variable name. If + pack_innermost_dim is specified, the innermost dimension of the ndarray + will be packed into a hex string using array2hexstring. If no_decl is + set to True, no variable name and type will be generated as part of the + emitted string. + """ + hls_dtype = dtype.get_hls_datatype_str() + if type(ndarray) != np.ndarray or ndarray.dtype != np.float32: + # try to convert to a float numpy array (container dtype is float) + ndarray = np.asarray(ndarray, dtype=np.float32) + if pack_innermost_dim: + idimlen = ndarray.shape[-1] + idimbits = idimlen * dtype.bitwidth() + ndarray = pack_innermost_dim_as_hex_string(ndarray, dtype, idimbits) + hls_dtype = "ap_uint<%d>" % idimbits + ndims = ndarray.ndim + # add type string and variable name + # e.g. 
"const ap_uint<64>" "weightMem0" + ret = "%s %s" % (hls_dtype, hls_var_name) + # add dimensions + for d in range(ndims): + ret += "[%d]" % ndarray.shape[d] + orig_printops = np.get_printoptions() + np.set_printoptions(threshold=sys.maxsize) + + # define a function to convert a single element into a C++ init string + # a single element can be a hex string if we are using packing + def elem2str(x): + if type(x) == str or type(x) == np.str_ or type(x) == np.str: + return '%s("%s", 16)' % (hls_dtype, x) + elif type(x) == np.float32: + if dtype == DataType.FLOAT32: + return str(x) + else: + return str(int(x)) + else: + raise Exception("Unsupported type for numpy_to_hls_code") + + strarr = np.array2string(ndarray, separator=", ", formatter={"all": elem2str}) + np.set_printoptions(**orig_printops) + strarr = strarr.replace("[", "{").replace("]", "}") + if no_decl: + ret = strarr + ";" + else: + ret = ret + " = \n" + strarr + ";" + return ret + + +def npy_to_rtlsim_input(input_file, input_dtype, pad_to_nbits, reverse_inner=True): + """Convert the multidimensional NumPy array of integers (stored as floats) + from input_file into a flattened sequence of Python arbitrary-precision + integers, packing the innermost dimension. See + finn.util.basic.pack_innermost_dim_as_hex_string() for more info on how the + packing works. 
If reverse_inner is set, the innermost dimension will be + reversed prior to packing.""" + if issubclass(type(input_file), np.ndarray): + inp = input_file + elif os.path.isfile(input_file): + inp = np.load(input_file) + else: + raise Exception("input_file must be ndarray or filename for .npy") + packed_data = pack_innermost_dim_as_hex_string( + inp, input_dtype, pad_to_nbits, reverse_inner=reverse_inner + ) + packed_data = packed_data.flatten() + packed_data = [int(x[2:], 16) for x in packed_data] + return packed_data + + +def rtlsim_output_to_npy( + output, path, dtype, shape, packedBits, targetBits, reverse_inner=True +): + """Convert a flattened sequence of Python arbitrary-precision integers + output into a NumPy array, saved as npy file at path. Each arbitrary-precision + integer is assumed to be a packed array of targetBits-bit elements, which + will be unpacked as the innermost dimension of the NumPy array.""" + + # TODO should have its own testbench? + output = np.asarray([hex(int(x)) for x in output]) + out_array = unpack_innermost_dim_from_hex_string( + output, dtype, shape, packedBits=packedBits, reverse_inner=reverse_inner + ) + np.save(path, out_array) + return out_array + + +def finnpy_to_packed_bytearray( + ndarray, dtype, reverse_inner=False, reverse_endian=False +): + """Given a numpy ndarray with FINN DataType dtype, pack the innermost + dimension and return the packed representation as an ndarray of uint8. + The packed innermost dimension will be padded to the nearest multiple + of 8 bits. The returned ndarray has the same number of dimensions as the + input. 
+ """ + + if (not issubclass(type(ndarray), np.ndarray)) or ndarray.dtype != np.float32: + # try to convert to a float numpy array (container dtype is float) + ndarray = np.asarray(ndarray, dtype=np.float32) + # pack innermost dim to hex strings padded to 8 bits + bits = dtype.bitwidth() * ndarray.shape[-1] + bits_padded = roundup_to_integer_multiple(bits, 8) + packed_hexstring = pack_innermost_dim_as_hex_string( + ndarray, dtype, bits_padded, reverse_inner=reverse_inner + ) + + def fn(x): + return np.asarray(list(map(hexstring2npbytearray, x))) + + if packed_hexstring.ndim == 0: + # scalar, call hexstring2npbytearray directly + ret = hexstring2npbytearray(np.asscalar(packed_hexstring)) + else: + # convert ndarray of hex strings to byte array + ret = np.apply_along_axis(fn, packed_hexstring.ndim - 1, packed_hexstring) + if reverse_endian: + # reverse the endianness of packing dimension + ret = np.flip(ret, axis=-1) + return ret + + +def packed_bytearray_to_finnpy( + packed_bytearray, + dtype, + output_shape=None, + reverse_inner=False, + reverse_endian=False, +): + """Given a packed numpy uint8 ndarray, unpack it into a FINN array of + given DataType. 
+ + output_shape can be specified to remove padding from the + packed dimension, or set to None to be inferred from the input.""" + + if ( + not issubclass(type(packed_bytearray), np.ndarray) + ) or packed_bytearray.dtype != np.uint8: + raise Exception("packed_bytearray_to_finnpy needs NumPy uint8 arrays") + if packed_bytearray.ndim == 0: + raise Exception("packed_bytearray_to_finnpy expects at least 1D ndarray") + packed_dim = packed_bytearray.ndim - 1 + packed_bits = packed_bytearray.shape[packed_dim] * 8 + target_bits = dtype.bitwidth() + if output_shape is None: + # determine output shape from input shape + assert ( + packed_bits % target_bits == 0 + ), """packed_bits are not divisable by + target_bits.""" + n_target_elems = packed_bits // target_bits + output_shape = packed_bytearray.shape[:-1] + (n_target_elems,) + # if reverse_endian and target_bits > 8: + # # revse the endianness of each element + # orig_shape = packed_bytearray.shape + # assert target_bits % 8 == 0, "target_bits are not a multiple of 8." + # target_bytes = target_bits // 8 + # new_shape = orig_shape[:-1] + (-1, target_bytes) + # packed_bytearray = np.flip(packed_bytearray.reshape(new_shape), axis=-1) + # packed_bytearray = packed_bytearray.reshape(orig_shape) + if reverse_endian: + packed_bytearray = np.flip(packed_bytearray, axis=-1) + # convert innermost dim of byte array to hex strings + packed_hexstring = np.apply_along_axis( + npbytearray2hexstring, packed_dim, packed_bytearray + ) + ret = unpack_innermost_dim_from_hex_string( + packed_hexstring, dtype, output_shape, packed_bits, reverse_inner + ) + + return ret diff --git a/src/finn/util/fpgadataflow.py b/src/finn/util/fpgadataflow.py new file mode 100644 index 0000000000000000000000000000000000000000..29607b002bd7d7748d450c84f816606d18fded81 --- /dev/null +++ b/src/finn/util/fpgadataflow.py @@ -0,0 +1,89 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import os +import subprocess + +from pyverilator import PyVerilator + + +class IPGenBuilder: + """Builds the bash script to generate IP blocks using Vivado HLS.""" + + def __init__(self): + self.tcl_script = "" + self.ipgen_path = "" + self.code_gen_dir = "" + self.ipgen_script = "" + + def append_tcl(self, tcl_script): + """Sets member variable "tcl_script" to given tcl script.""" + self.tcl_script = tcl_script + + def set_ipgen_path(self, path): + """Sets member variable ipgen_path to given path.""" + self.ipgen_path = path + + def build(self, code_gen_dir): + """Builds the bash script with given parameters and saves it in given folder. + To guarantee the generation in the correct folder the bash script contains a + cd command.""" + self.code_gen_dir = code_gen_dir + self.ipgen_script = str(self.code_gen_dir) + "/ipgen.sh" + working_dir = os.environ["PWD"] + f = open(self.ipgen_script, "w") + f.write("#!/bin/bash \n") + f.write("cd {}\n".format(code_gen_dir)) + f.write("vivado_hls {}\n".format(self.tcl_script)) + f.write("cd {}\n".format(working_dir)) + f.close() + bash_command = ["bash", self.ipgen_script] + process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE) + process_compile.communicate() + + +def pyverilate_stitched_ip(model): + "Given a model with stitched IP, return a PyVerilator sim object." 
+ vivado_stitch_proj_dir = model.get_metadata_prop("vivado_stitch_proj") + with open(vivado_stitch_proj_dir + "/all_verilog_srcs.txt", "r") as f: + all_verilog_srcs = f.read().split() + + def file_to_dir(x): + return os.path.dirname(os.path.realpath(x)) + + all_verilog_dirs = list(map(file_to_dir, all_verilog_srcs)) + top_verilog = model.get_metadata_prop("wrapper_filename") + sim = PyVerilator.build(top_verilog, verilog_path=all_verilog_dirs) + return sim + + +def pyverilate_get_liveness_threshold_cycles(): + """Return the number of no-output cycles rtlsim will wait before assuming + the simulation is not finishing and throwing an exception.""" + + return int(os.getenv("LIVENESS_THRESHOLD", 10000)) diff --git a/src/finn/util/onnx.py b/src/finn/util/onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..b9932111d86d7206b23e1d0e49a6aa8451f8ba24 --- /dev/null +++ b/src/finn/util/onnx.py @@ -0,0 +1,39 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import numpy as np +import onnx + + +def valueinfo_to_tensor(vi): + """Creates an all-zeroes numpy tensor from a ValueInfoProto.""" + + dims = [x.dim_value for x in vi.type.tensor_type.shape.dim] + return np.zeros( + dims, dtype=onnx.mapping.TENSOR_TYPE_TO_NP_TYPE[vi.type.tensor_type.elem_type] + ) diff --git a/src/finn/util/test.py b/src/finn/util/test.py new file mode 100644 index 0000000000000000000000000000000000000000..428ac3ea63e6913ed12364785b3ebfae527d1fdb --- /dev/null +++ b/src/finn/util/test.py @@ -0,0 +1,66 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import torch +from models.CNV import CNV +from models.LFC import LFC +from models.SFC import SFC +from models.TFC import TFC + + +def get_trained_checkpoint(netname, wbits, abits): + """Returns the weights and activations from the FINN Brevitas test networks + for given netname and the number of bits for weights and activations""" + # TODO get from config instead, hardcoded to Docker path for now + nname = "%s_%dW%dA" % (netname, wbits, abits) + root = "/workspace/brevitas_cnv_lfc/pretrained_models/%s/checkpoints/best.tar" + return root % nname + + +def get_test_model_def_fxn(netname): + """Returns the PyTorch model instantiation function related to netname.""" + model_def_map = {"LFC": LFC, "SFC": SFC, "TFC": TFC, "CNV": CNV} + return model_def_map[netname] + + +def get_test_model_trained(netname, wbits, abits): + """Returns the pretrained model specified by input arguments loaded with weights + and activations from the FINN Brevitas test networks.""" + model_def_fxn = get_test_model_def_fxn(netname) + checkpoint_loc = get_trained_checkpoint(netname, wbits, abits) + fc = model_def_fxn(weight_bit_width=wbits, act_bit_width=abits, in_bit_width=abits) + checkpoint = 
torch.load(checkpoint_loc, map_location="cpu") + fc.load_state_dict(checkpoint["state_dict"]) + return fc.eval() + + +def get_test_model_untrained(netname, wbits, abits): + """Returns untrained model specified by input arguments.""" + model_def_fxn = get_test_model_def_fxn(netname) + fc = model_def_fxn(weight_bit_width=wbits, act_bit_width=abits, in_bit_width=abits) + return fc.eval() diff --git a/tests/analysis/test_is_linear.py b/tests/analysis/test_is_linear.py index cce4612a93112608564b59d04c0370b195c5f2d4..6afe9bb9c57f5f20486a9a35bab9902e2d952b02 100644 --- a/tests/analysis/test_is_linear.py +++ b/tests/analysis/test_is_linear.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import onnx.helper as oh from onnx import TensorProto diff --git a/tests/analysis/test_topology_checks.py b/tests/analysis/test_topology_checks.py index e28ceac09ecfdd242285f6b9e355bd4c2cfd7e68..41fbdb6cac8e81d6b1e3eed54a71d0e1d43c3adc 100644 --- a/tests/analysis/test_topology_checks.py +++ b/tests/analysis/test_topology_checks.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from pkgutil import get_data import onnx.helper as oh diff --git a/tests/test_brevitas_cnv.py b/tests/brevitas/test_brevitas_cnv.py similarity index 58% rename from tests/test_brevitas_cnv.py rename to tests/brevitas/test_brevitas_cnv.py index 6c2eeaa7e3c78c44ab0611d0f40223c6f41969bd..3c9f8d08223ed57d5b7409093f39d1af9613be83 100644 --- a/tests/test_brevitas_cnv.py +++ b/tests/brevitas/test_brevitas_cnv.py @@ -1,25 +1,49 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import os import pkg_resources as pk import brevitas.onnx as bo import numpy as np import torch -from models.CNV import CNV import finn.core.onnx_exec as oxe from finn.core.modelwrapper import ModelWrapper from finn.transformation.fold_constants import FoldConstants from finn.transformation.infer_shapes import InferShapes +from finn.util.test import get_test_model_trained, get_test_model_untrained export_onnx_path = "test_output_cnv.onnx" -# TODO get from config instead, hardcoded to Docker path for now -trained_cnv_checkpoint = ( - "/workspace/brevitas_cnv_lfc/pretrained_models/CNV_1W1A/checkpoints/best.tar" -) def test_brevitas_cnv_w1a1_export(): - cnv = CNV(weight_bit_width=1, act_bit_width=1, in_bit_width=1, in_ch=3).eval() + cnv = get_test_model_untrained("CNV", 1, 1) bo.export_finn_onnx(cnv, (1, 3, 32, 32), export_onnx_path) model = ModelWrapper(export_onnx_path) assert model.graph.node[2].op_type == "Sign" @@ -31,9 +55,7 @@ def test_brevitas_cnv_w1a1_export(): def test_brevitas_cnv_w1a1_export_exec(): - cnv = CNV(weight_bit_width=1, act_bit_width=1, in_bit_width=1, in_ch=3).eval() - checkpoint = torch.load(trained_cnv_checkpoint, map_location="cpu") - cnv.load_state_dict(checkpoint["state_dict"]) + cnv = get_test_model_trained("CNV", 1, 1) bo.export_finn_onnx(cnv, (1, 3, 32, 32), export_onnx_path) model = ModelWrapper(export_onnx_path) model = model.transform(InferShapes()) @@ -53,11 +75,9 @@ def test_brevitas_cnv_w1a1_export_exec(): 
os.remove(export_onnx_path) -def test_brevitas_trained_cnv_w1a1_pytorch(): +def test_brevitas_cnv_w1a1_pytorch(): # load pretrained weights into CNV-w1a1 - cnv = CNV(weight_bit_width=1, act_bit_width=1, in_bit_width=1, in_ch=3).eval() - checkpoint = torch.load(trained_cnv_checkpoint, map_location="cpu") - cnv.load_state_dict(checkpoint["state_dict"]) + cnv = get_test_model_trained("CNV", 1, 1) fn = pk.resource_filename("finn", "data/cifar10/cifar10-test-data-class3.npz") input_tensor = np.load(fn)["arr_0"] input_tensor = torch.from_numpy(input_tensor).float() diff --git a/tests/brevitas/test_brevitas_fc.py b/tests/brevitas/test_brevitas_fc.py new file mode 100644 index 0000000000000000000000000000000000000000..6be2c9a255ead2cd3f9cbcceaca60816060b19bf --- /dev/null +++ b/tests/brevitas/test_brevitas_fc.py @@ -0,0 +1,74 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from pkgutil import get_data + +import pytest + +import brevitas.onnx as bo +import numpy as np +import onnx +import onnx.numpy_helper as nph +import torch + +import finn.core.onnx_exec as oxe +from finn.core.modelwrapper import ModelWrapper +from finn.transformation.fold_constants import FoldConstants +from finn.transformation.infer_shapes import InferShapes +from finn.util.basic import make_build_dir +from finn.util.test import get_test_model_trained + +export_onnx_path = make_build_dir("test_brevitas_fc_") + +# activation: None or DataType +@pytest.mark.parametrize("size", ["TFC", "SFC", "LFC"]) +# weight bits +@pytest.mark.parametrize("wbits", [1]) +# act bits +@pytest.mark.parametrize("abits", [1, 2]) +def test_brevitas_fc_onnx_export_and_exec(size, wbits, abits): + nname = "%s_%dW%dA" % (size, wbits, abits) + finn_onnx = export_onnx_path + "/%s.onnx" % nname + fc = get_test_model_trained(size, wbits, abits) + bo.export_finn_onnx(fc, (1, 1, 28, 28), finn_onnx) + model = ModelWrapper(finn_onnx) + model = model.transform(InferShapes()) + model = model.transform(FoldConstants()) + # load one of the test vectors + raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") + input_tensor = onnx.load_tensor_from_string(raw_i) + # run using FINN-based execution + input_dict = {"0": nph.to_array(input_tensor)} + output_dict = oxe.execute_onnx(model, input_dict) + produced = output_dict[list(output_dict.keys())[0]] + # run 
using PyTorch/Brevitas + input_tensor = torch.from_numpy(nph.to_array(input_tensor)).float() + assert input_tensor.shape == (1, 1, 28, 28) + # do forward pass in PyTorch/Brevitas + expected = fc.forward(input_tensor).detach().numpy() + assert np.isclose(produced, expected, atol=1e-3).all() diff --git a/tests/conftest.py b/tests/conftest.py index 3e2725c01f796129b51629e4bec8b494001015b2..7137c9395e015b3b17d2601444a48a298da9a34a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # -*- coding: utf-8 -*- """ Dummy conftest.py for finn. diff --git a/tests/core/test_basic_onnx_exec.py b/tests/core/test_basic_onnx_exec.py new file mode 100644 index 0000000000000000000000000000000000000000..a7b6da9965aa5912870812a8c1f8d6da2ee0d181 --- /dev/null +++ b/tests/core/test_basic_onnx_exec.py @@ -0,0 +1,55 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from pkgutil import get_data + +import numpy as np +import onnx +import onnx.numpy_helper as np_helper + +import finn.core.onnx_exec as oxe +from finn.core.modelwrapper import ModelWrapper +from finn.transformation.infer_shapes import InferShapes + + +def test_mnist_onnx_download_extract_run(): + # load the onnx model + raw_m = get_data("finn", "data/onnx/mnist-conv/model.onnx") + model = ModelWrapper(raw_m) + model = model.transform(InferShapes()) + # load one of the test vectors + raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") + raw_o = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/output_0.pb") + input_tensor = onnx.load_tensor_from_string(raw_i) + output_tensor = onnx.load_tensor_from_string(raw_o) + # run using FINN-based execution + input_dict = {"Input3": np_helper.to_array(input_tensor)} + output_dict = oxe.execute_onnx(model, input_dict) + assert np.isclose( + np_helper.to_array(output_tensor), output_dict["Plus214_Output_0"], atol=1e-3 + ).all() diff --git a/tests/test_custom_onnx_exec.py b/tests/core/test_custom_onnx_exec.py similarity index 77% rename from tests/test_custom_onnx_exec.py rename to tests/core/test_custom_onnx_exec.py index e1ff552572e8a6d4d55e204cd21f17e4984ce30d..29ef2ee560d498eba04845fc0a6051fd0cae14ab 100644 --- a/tests/test_custom_onnx_exec.py +++ b/tests/core/test_custom_onnx_exec.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import TensorProto, helper diff --git a/tests/core/test_datatypes.py b/tests/core/test_datatypes.py index af04f8eb5e1d9b818646f8c825ad21c4fea3ad74..f1d34923c5f8b05dc21a0d5b7781879deabae379 100644 --- a/tests/core/test_datatypes.py +++ b/tests/core/test_datatypes.py @@ -1,4 +1,30 @@ -# -*- coding: utf-8 -*- +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from finn.core.datatype import DataType diff --git a/tests/test_mixed_onnx_exec.py b/tests/core/test_mixed_onnx_exec.py similarity index 74% rename from tests/test_mixed_onnx_exec.py rename to tests/core/test_mixed_onnx_exec.py index 75170da3ad3a8c9b24814e171b8ccdfde4fb74cd..d8754105e3001f3ef40b1df23e4d5a22aa176ba4 100644 --- a/tests/test_mixed_onnx_exec.py +++ b/tests/core/test_mixed_onnx_exec.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import TensorProto, helper diff --git a/tests/core/test_modelwrapper.py b/tests/core/test_modelwrapper.py index 162278da13af9766b1ebbe081616c1865bec03e2..5d18de2d18157383a3c7882febfa752d72774572 100644 --- a/tests/core/test_modelwrapper.py +++ b/tests/core/test_modelwrapper.py @@ -1,24 +1,45 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ import os from collections import Counter import brevitas.onnx as bo import numpy as np -import torch -from models.LFC import LFC from finn.core.modelwrapper import ModelWrapper +from finn.util.test import get_test_model_trained export_onnx_path = "test_output_lfc.onnx" -# TODO get from config instead, hardcoded to Docker path for now -trained_lfc_checkpoint = ( - "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar" -) def test_modelwrapper(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) - checkpoint = torch.load(trained_lfc_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) + lfc = get_test_model_trained("LFC", 1, 1) bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) model = ModelWrapper(export_onnx_path) assert model.check_all_tensor_shapes_specified() is False diff --git a/tests/test_multi_thresholding.py b/tests/custom_op/test_multi_thresholding.py similarity index 73% rename from tests/test_multi_thresholding.py rename to tests/custom_op/test_multi_thresholding.py index 36a348a26e7f7df779d525b9ee26cbe9e7f34f45..4f2b08675fdabb1bda49972c51892da92e1a0cdc 100644 --- a/tests/test_multi_thresholding.py +++ b/tests/custom_op/test_multi_thresholding.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from finn.custom_op.multithreshold import multithreshold diff --git a/tests/test_verify_custom_nodes.py b/tests/custom_op/test_verify_custom_nodes.py similarity index 76% rename from tests/test_verify_custom_nodes.py rename to tests/custom_op/test_verify_custom_nodes.py index 9146f9d30853e6c7a47436c100166a4fa9127e87..e9ea2da622c4f24be7c599a7a8071e859e14ee3d 100644 --- a/tests/test_verify_custom_nodes.py +++ b/tests/custom_op/test_verify_custom_nodes.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from onnx import TensorProto, helper from finn.analysis.verify_custom_nodes import verify_nodes diff --git a/tests/test_xnorpopcountmatmul.py b/tests/custom_op/test_xnorpopcountmatmul.py similarity index 67% rename from tests/test_xnorpopcountmatmul.py rename to tests/custom_op/test_xnorpopcountmatmul.py index fc174a098cb901869065f2ec92c8fee1e65c1968..6b59283667ac05f569e5c3d80dbfc1530616d045 100644 --- a/tests/test_xnorpopcountmatmul.py +++ b/tests/custom_op/test_xnorpopcountmatmul.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ import os from pkgutil import get_data @@ -6,8 +34,6 @@ import numpy as np import onnx import onnx.helper as helper import onnx.numpy_helper as nph -import torch -from models.LFC import LFC from onnx import TensorProto import finn.core.onnx_exec as oxe @@ -19,6 +45,7 @@ from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeN from finn.transformation.infer_datatypes import InferDataTypes from finn.transformation.infer_shapes import InferShapes from finn.transformation.streamline.sign_to_thres import ConvertSignToThres +from finn.util.test import get_test_model_trained export_onnx_path = "test_output_lfc.onnx" # TODO get from config instead, hardcoded to Docker path for now @@ -63,9 +90,7 @@ def test_xnorpopcountmatmul(): def test_convert_bipolar_matmul_to_xnorpopcountmatmul(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) - checkpoint = torch.load(trained_lfc_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) + lfc = get_test_model_trained("LFC", 1, 1) bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) model = ModelWrapper(export_onnx_path) model = model.transform(InferShapes()) diff --git a/tests/end2end/test_end2end_tfc_w1a1.py b/tests/end2end/test_end2end_tfc_w1a1.py new file mode 100644 index 0000000000000000000000000000000000000000..1c2a3b799d423cb0779da3388e71f5ac8cb1b380 --- /dev/null +++ b/tests/end2end/test_end2end_tfc_w1a1.py @@ -0,0 +1,299 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import os +from pkgutil import get_data + +import pytest + +import numpy as np + +# as of Feb'20 there is a bug that segfaults ONNX shape inference if we +# import pytorch before onnx, so we make sure to import onnx first +import onnx # NOQA +import onnx.numpy_helper as nph + +import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls +import finn.transformation.streamline.absorb as absorb +from finn.core.modelwrapper import ModelWrapper +from finn.core.onnx_exec import execute_onnx +from finn.custom_op.registry import getCustomOp +from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount +from finn.transformation.fold_constants import FoldConstants +from finn.transformation.fpgadataflow.codegen_ipgen import CodeGen_ipgen +from finn.transformation.fpgadataflow.codegen_ipstitch import CodeGen_ipstitch +from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim +from finn.transformation.fpgadataflow.compile import Compile +from finn.transformation.fpgadataflow.create_dataflow_partition import ( + CreateDataflowPartition, +) +from finn.transformation.fpgadataflow.hlssynth_ipgen import HLSSynth_IPGen +from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker +from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ +from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver +from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject +from finn.transformation.fpgadataflow.replace_verilog_relpaths import ( + ReplaceVerilogRelPaths, +) +from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode +from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject +from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames +from finn.transformation.infer_datatypes import InferDataTypes +from finn.transformation.infer_shapes import InferShapes +from finn.transformation.streamline import Streamline 
+from finn.transformation.streamline.round_thresholds import RoundAndClipThresholds +from finn.util.basic import pynq_part_map +from finn.util.test import get_test_model_trained + +build_dir = "/tmp/" + os.environ["FINN_INST_NAME"] +test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1") +test_fpga_part = pynq_part_map[test_pynq_board] +target_clk_ns = 5 + + +def test_end2end_tfc_w1a1_export(): + import brevitas.onnx as bo + + tfc = get_test_model_trained("TFC", 1, 1) + bo.export_finn_onnx( + tfc, (1, 1, 28, 28), build_dir + "/end2end_tfc_w1a1_export.onnx" + ) + + +def test_end2end_tfc_w1a1_import_and_tidy(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_export.onnx") + model = model.transform(InferShapes()) + model = model.transform(FoldConstants()) + model = model.transform(GiveUniqueNodeNames()) + model = model.transform(GiveReadableTensorNames()) + model = model.transform(InferDataTypes()) + model.save(build_dir + "/end2end_tfc_w1a1_tidy.onnx") + + +def test_end2end_tfc_w1a1_streamline(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_tidy.onnx") + model = model.transform(Streamline()) + model.save(build_dir + "/end2end_tfc_w1a1_streamlined.onnx") + + +def test_end2end_tfc_w1a1_convert_to_hls_layers(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_streamlined.onnx") + model = model.transform(ConvertBipolarMatMulToXnorPopcount()) + model = model.transform(absorb.AbsorbAddIntoMultiThreshold()) + model = model.transform(absorb.AbsorbMulIntoMultiThreshold()) + model = model.transform(RoundAndClipThresholds()) + model = model.transform(to_hls.InferBinaryStreamingFCLayer()) + model.save(build_dir + "/end2end_tfc_w1a1_hls_layers.onnx") + + +def test_end2end_tfc_w1a1_create_dataflow_partition(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_hls_layers.onnx") + parent_model = model.transform(CreateDataflowPartition()) + parent_model.save(build_dir + "/end2end_tfc_w1a1_dataflow_parent.onnx") + sdp_node = 
getCustomOp(parent_model.graph.node[2]) + dataflow_model_filename = sdp_node.get_nodeattr("model") + dataflow_model = ModelWrapper(dataflow_model_filename) + dataflow_model.save(build_dir + "/end2end_tfc_w1a1_dataflow_model.onnx") + + +def test_end2end_tfc_w1a1_fold_and_tlastmarker(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_dataflow_model.onnx") + fc0 = model.graph.node[0] + fc1 = model.graph.node[1] + fc2 = model.graph.node[2] + fc3 = model.graph.node[3] + fc0w = getCustomOp(fc0) + fc1w = getCustomOp(fc1) + fc2w = getCustomOp(fc2) + fc3w = getCustomOp(fc3) + fc0w.set_nodeattr("inFIFODepth", 50) + fc0w.set_nodeattr("SIMD", 16) + fc0w.set_nodeattr("PE", 16) + fc0w.set_nodeattr("outFIFODepth", 4) + fc1w.set_nodeattr("SIMD", 16) + fc1w.set_nodeattr("PE", 16) + fc1w.set_nodeattr("outFIFODepth", 4) + fc2w.set_nodeattr("SIMD", 16) + fc2w.set_nodeattr("PE", 16) + fc2w.set_nodeattr("outFIFODepth", 4) + fc3w.set_nodeattr("SIMD", 16) + fc3w.set_nodeattr("PE", 10) + fc3w.set_nodeattr("outFIFODepth", 50) + model = model.transform(InsertTLastMarker()) + model.save(build_dir + "/end2end_tfc_w1a1_folded.onnx") + + +def test_end2end_tfc_w1a1_gen_hls_ip(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_folded.onnx") + model = model.transform(GiveUniqueNodeNames()) + model = model.transform(CodeGen_ipgen(test_fpga_part, target_clk_ns)) + model = model.transform(HLSSynth_IPGen()) + model.save(build_dir + "/end2end_tfc_w1a1_ipgen.onnx") + + +def test_end2end_tfc_w1a1_ip_stitch(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_ipgen.onnx") + model = model.transform(ReplaceVerilogRelPaths()) + model = model.transform(CodeGen_ipstitch(test_fpga_part)) + model.save(build_dir + "/end2end_tfc_w1a1_ipstitch.onnx") + + +def test_end2end_tfc_w1a1_verify_dataflow_part(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_ipstitch.onnx") + x = np.zeros((1, 784), dtype=np.float32) + inp_name = model.graph.input[0].name + out_name = model.graph.output[0].name + 
inp_dict = {inp_name: x} + # npysim + model = model.transform(CodeGen_npysim()) + model = model.transform(Compile()) + model = model.transform(SetExecMode("npysim")) + model.save(build_dir + "/end2end_tfc_w1a1_ipstitch_npysim.onnx") + ret_npysim = execute_onnx(model, inp_dict, True) + res_npysim = ret_npysim[out_name] + # node-by-node rtlsim + model = model.transform(SetExecMode("rtlsim")) + getCustomOp(model.graph.node[0]).set_nodeattr("rtlsim_trace", "default") + getCustomOp(model.graph.node[1]).set_nodeattr("rtlsim_trace", "default") + getCustomOp(model.graph.node[2]).set_nodeattr("rtlsim_trace", "default") + getCustomOp(model.graph.node[3]).set_nodeattr("rtlsim_trace", "default") + model.save(build_dir + "/end2end_tfc_w1a1_ipstitch_nodebynode_rtlsim.onnx") + ret_rtlsim_nodebynode = execute_onnx(model, inp_dict, True) + res_rtlsim_nodebynode = ret_rtlsim_nodebynode[out_name] + # whole-network (ip-stitched) rtlsim + model.set_metadata_prop("exec_mode", "rtlsim") + model.set_metadata_prop("rtlsim_trace", "whole_trace.vcd") + model.save(build_dir + "/end2end_tfc_w1a1_ipstitch_whole_rtlsim.onnx") + ret_rtlsim_whole = execute_onnx(model, inp_dict, True) + res_rtlsim_whole = ret_rtlsim_whole[out_name] + assert np.isclose(res_npysim, res_rtlsim_nodebynode).all() + assert np.isclose(res_npysim, res_rtlsim_whole).all() + + +def test_end2end_tfc_w1a1_verify_all(): + # use the streamlined model as the "golden" model for right answers + golden = ModelWrapper(build_dir + "/end2end_tfc_w1a1_streamlined.onnx") + iname = golden.graph.input[0].name + oname = golden.graph.output[0].name + raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") + input_tensor = onnx.load_tensor_from_string(raw_i) + x = nph.to_array(input_tensor) + # x = np.zeros(ishape, dtype=np.float32) + ret_golden = execute_onnx(golden, {iname: x}, True) + y_golden = ret_golden[oname] + # set up parent+child graph to test + # we'll use models from the previous step as the child model + 
parent_model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_dataflow_parent.onnx") + iname = parent_model.graph.input[0].name + oname = parent_model.graph.output[0].name + # produce results with npysim + sdp_node = getCustomOp(parent_model.graph.node[2]) + sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w1a1_ipstitch_npysim.onnx") + ret_npysim = execute_onnx(parent_model, {iname: x}, True) + y_npysim = ret_npysim[oname] + # produce results with node-by-node rtlsim + sdp_node.set_nodeattr( + "model", build_dir + "/end2end_tfc_w1a1_ipstitch_nodebynode_rtlsim.onnx" + ) + ret_nodebynode_rtlsim = execute_onnx(parent_model, {iname: x}, True) + y_nodebynode_rtlsim = ret_nodebynode_rtlsim[oname] + # produce results with whole-network (stitched ip) rtlsim + sdp_node.set_nodeattr( + "model", build_dir + "/end2end_tfc_w1a1_ipstitch_whole_rtlsim.onnx" + ) + ret_whole_rtlsim = execute_onnx(parent_model, {iname: x}, True) + y_whole_rtlsim = ret_whole_rtlsim[oname] + assert np.isclose(y_golden, y_npysim).all() + assert np.isclose(y_golden, y_nodebynode_rtlsim).all() + assert np.isclose(y_golden, y_whole_rtlsim).all() + + +def test_end2end_tfc_w1a1_make_pynq_proj(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_ipstitch.onnx") + model = model.transform(MakePYNQProject(test_pynq_board)) + model.save(build_dir + "/end2end_tfc_w1a1_pynq_project.onnx") + + +def test_end2end_tfc_w1a1_synth_pynq_project(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_pynq_project.onnx") + model = model.transform(SynthPYNQProject()) + model.save(build_dir + "/end2end_tfc_w1a1_synth.onnx") + + +def test_end2end_tfc_w1a1_make_driver(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_synth.onnx") + model = model.transform(MakePYNQDriver()) + model.save(build_dir + "/end2end_tfc_w1a1_pynq_driver.onnx") + + +def test_end2end_tfc_w1a1_deploy_on_pynq(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_pynq_driver.onnx") + try: + ip = os.environ["PYNQ_IP"] # no fault for this 
one; skip if not defined + if ip == "": + pytest.skip("PYNQ board IP address not specified") + username = os.getenv("PYNQ_USERNAME", "xilinx") + password = os.getenv("PYNQ_PASSWORD", "xilinx") + target_dir = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn") + model = model.transform(DeployToPYNQ(ip, username, password, target_dir)) + # save the model to be able to link it to the parent + model.save(build_dir + "/end2end_tfc_w1a1_pynq_deploy.onnx") + except KeyError: + pytest.skip("PYNQ board IP address not specified") + + +def test_end2end_tfc_w1a1_run_on_pynq(): + # use the streamlined model as the "golden" model for right answers + golden = ModelWrapper(build_dir + "/end2end_tfc_w1a1_streamlined.onnx") + iname = golden.graph.input[0].name + oname = golden.graph.output[0].name + raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") + input_tensor = onnx.load_tensor_from_string(raw_i) + x = nph.to_array(input_tensor) + # x = np.zeros(ishape, dtype=np.float32) + # run using FINN-based execution + ret_golden = execute_onnx(golden, {iname: x}, True) + y_golden = ret_golden[oname] + # set up parent+child graph to test + # we'll use models from the previous step as the child model + parent_model = ModelWrapper(build_dir + "/end2end_tfc_w1a1_dataflow_parent.onnx") + iname = parent_model.graph.input[0].name + oname = parent_model.graph.output[0].name + try: + ip = os.environ["PYNQ_IP"] # NOQA + if ip == "": + pytest.skip("PYNQ board IP address not specified") + # produce results with npysim + sdp_node = getCustomOp(parent_model.graph.node[2]) + sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w1a1_pynq_deploy.onnx") + ret = execute_onnx(parent_model, {iname: x}, True) + y = ret[oname] + assert np.isclose(y, y_golden).all() + + except KeyError: + pytest.skip("PYNQ board IP address not specified") diff --git a/tests/end2end/test_end2end_tfc_w1a2.py b/tests/end2end/test_end2end_tfc_w1a2.py new file mode 100644 index 
0000000000000000000000000000000000000000..6e2505c78cf3e32e99b5d798323b20d558312ee3 --- /dev/null +++ b/tests/end2end/test_end2end_tfc_w1a2.py @@ -0,0 +1,297 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import os +from pkgutil import get_data + +import pytest + +import numpy as np + +# as of Feb'20 there is a bug that segfaults ONNX shape inference if we +# import pytorch before onnx, so we make sure to import onnx first +import onnx # NOQA +import onnx.numpy_helper as nph + +import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls +from finn.core.modelwrapper import ModelWrapper +from finn.core.onnx_exec import execute_onnx +from finn.custom_op.registry import getCustomOp +from finn.transformation.fold_constants import FoldConstants +from finn.transformation.fpgadataflow.codegen_ipgen import CodeGen_ipgen +from finn.transformation.fpgadataflow.codegen_ipstitch import CodeGen_ipstitch +from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim +from finn.transformation.fpgadataflow.compile import Compile +from finn.transformation.fpgadataflow.create_dataflow_partition import ( + CreateDataflowPartition, +) +from finn.transformation.fpgadataflow.hlssynth_ipgen import HLSSynth_IPGen +from finn.transformation.fpgadataflow.insert_tlastmarker import InsertTLastMarker +from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ +from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver +from finn.transformation.fpgadataflow.make_pynq_proj import MakePYNQProject +from finn.transformation.fpgadataflow.replace_verilog_relpaths import ( + ReplaceVerilogRelPaths, +) +from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode +from finn.transformation.fpgadataflow.synth_pynq_proj import SynthPYNQProject +from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames +from finn.transformation.infer_datatypes import InferDataTypes +from finn.transformation.infer_shapes import InferShapes +from finn.transformation.streamline import Streamline +from finn.util.basic import pynq_part_map +from finn.util.test import get_test_model_trained + +build_dir = "/tmp/" + 
os.environ["FINN_INST_NAME"] +test_pynq_board = os.getenv("PYNQ_BOARD", default="Pynq-Z1") +test_fpga_part = pynq_part_map[test_pynq_board] +target_clk_ns = 5 + + +def test_end2end_tfc_w1a2_export(): + import brevitas.onnx as bo + + tfc = get_test_model_trained("TFC", 1, 2) + bo.export_finn_onnx( + tfc, (1, 1, 28, 28), build_dir + "/end2end_tfc_w1a2_export.onnx" + ) + + +def test_end2end_tfc_w1a2_import_and_tidy(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_export.onnx") + model = model.transform(InferShapes()) + model = model.transform(FoldConstants()) + model = model.transform(GiveUniqueNodeNames()) + model = model.transform(GiveReadableTensorNames()) + model = model.transform(InferDataTypes()) + model.save(build_dir + "/end2end_tfc_w1a2_tidy.onnx") + + +def test_end2end_tfc_w1a2_streamline(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_tidy.onnx") + model = model.transform(Streamline()) + model.save(build_dir + "/end2end_tfc_w1a2_streamlined.onnx") + + +def test_end2end_tfc_w1a2_convert_to_hls_layers(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_streamlined.onnx") + # model = model.transform(ConvertBipolarMatMulToXnorPopcount()) + # model = model.transform(absorb.AbsorbAddIntoMultiThreshold()) + # model = model.transform(absorb.AbsorbMulIntoMultiThreshold()) + # model = model.transform(RoundAndClipThresholds()) + # model = model.transform(to_hls.InferBinaryStreamingFCLayer()) + model = model.transform(to_hls.InferQuantizedStreamingFCLayer()) + model.save(build_dir + "/end2end_tfc_w1a2_hls_layers.onnx") + + +def test_end2end_tfc_w1a2_create_dataflow_partition(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_hls_layers.onnx") + parent_model = model.transform(CreateDataflowPartition()) + parent_model.save(build_dir + "/end2end_tfc_w1a2_dataflow_parent.onnx") + sdp_node = getCustomOp(parent_model.graph.node[2]) + dataflow_model_filename = sdp_node.get_nodeattr("model") + dataflow_model = ModelWrapper(dataflow_model_filename) + 
dataflow_model.save(build_dir + "/end2end_tfc_w1a2_dataflow_model.onnx") + + +def test_end2end_tfc_w1a2_fold_and_tlastmarker(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_dataflow_model.onnx") + fc0 = model.graph.node[0] + fc1 = model.graph.node[1] + fc2 = model.graph.node[2] + fc3 = model.graph.node[3] + fc0w = getCustomOp(fc0) + fc1w = getCustomOp(fc1) + fc2w = getCustomOp(fc2) + fc3w = getCustomOp(fc3) + fc0w.set_nodeattr("inFIFODepth", 50) + fc0w.set_nodeattr("SIMD", 8) + fc0w.set_nodeattr("PE", 16) + fc0w.set_nodeattr("outFIFODepth", 4) + fc1w.set_nodeattr("SIMD", 16) + fc1w.set_nodeattr("PE", 16) + fc1w.set_nodeattr("outFIFODepth", 4) + fc2w.set_nodeattr("SIMD", 16) + fc2w.set_nodeattr("PE", 16) + fc2w.set_nodeattr("outFIFODepth", 4) + fc3w.set_nodeattr("SIMD", 16) + fc3w.set_nodeattr("PE", 10) + fc3w.set_nodeattr("outFIFODepth", 50) + model = model.transform(InsertTLastMarker()) + model.save(build_dir + "/end2end_tfc_w1a2_folded.onnx") + + +def test_end2end_tfc_w1a2_gen_hls_ip(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_folded.onnx") + model = model.transform(GiveUniqueNodeNames()) + model = model.transform(CodeGen_ipgen(test_fpga_part, target_clk_ns)) + model = model.transform(HLSSynth_IPGen()) + model.save(build_dir + "/end2end_tfc_w1a2_ipgen.onnx") + + +def test_end2end_tfc_w1a2_ip_stitch(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_ipgen.onnx") + model = model.transform(ReplaceVerilogRelPaths()) + model = model.transform(CodeGen_ipstitch(test_fpga_part)) + model.save(build_dir + "/end2end_tfc_w1a2_ipstitch.onnx") + + +def test_end2end_tfc_w1a2_verify_dataflow_part(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_ipstitch.onnx") + x = np.zeros((1, 784), dtype=np.float32) + inp_name = model.graph.input[0].name + out_name = model.graph.output[0].name + inp_dict = {inp_name: x} + # npysim + model = model.transform(CodeGen_npysim()) + model = model.transform(Compile()) + model = 
model.transform(SetExecMode("npysim")) + model.save(build_dir + "/end2end_tfc_w1a2_ipstitch_npysim.onnx") + ret_npysim = execute_onnx(model, inp_dict, True) + res_npysim = ret_npysim[out_name] + # node-by-node rtlsim + model = model.transform(SetExecMode("rtlsim")) + getCustomOp(model.graph.node[0]).set_nodeattr("rtlsim_trace", "default") + getCustomOp(model.graph.node[1]).set_nodeattr("rtlsim_trace", "default") + getCustomOp(model.graph.node[2]).set_nodeattr("rtlsim_trace", "default") + getCustomOp(model.graph.node[3]).set_nodeattr("rtlsim_trace", "default") + model.save(build_dir + "/end2end_tfc_w1a2_ipstitch_nodebynode_rtlsim.onnx") + ret_rtlsim_nodebynode = execute_onnx(model, inp_dict, True) + res_rtlsim_nodebynode = ret_rtlsim_nodebynode[out_name] + # whole-network (ip-stitched) rtlsim + model.set_metadata_prop("exec_mode", "rtlsim") + model.set_metadata_prop("rtlsim_trace", "whole_trace.vcd") + model.save(build_dir + "/end2end_tfc_w1a2_ipstitch_whole_rtlsim.onnx") + ret_rtlsim_whole = execute_onnx(model, inp_dict, True) + res_rtlsim_whole = ret_rtlsim_whole[out_name] + assert np.isclose(res_npysim, res_rtlsim_nodebynode).all() + assert np.isclose(res_npysim, res_rtlsim_whole).all() + + +def test_end2end_tfc_w1a2_verify_all(): + # use the streamlined model as the "golden" model for right answers + golden = ModelWrapper(build_dir + "/end2end_tfc_w1a2_streamlined.onnx") + iname = golden.graph.input[0].name + oname = golden.graph.output[0].name + raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") + input_tensor = onnx.load_tensor_from_string(raw_i) + x = nph.to_array(input_tensor) + # x = np.zeros(ishape, dtype=np.float32) + ret_golden = execute_onnx(golden, {iname: x}, True) + y_golden = ret_golden[oname] + # set up parent+child graph to test + # we'll use models from the previous step as the child model + parent_model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_dataflow_parent.onnx") + iname = parent_model.graph.input[0].name + oname 
= parent_model.graph.output[0].name + # produce results with npysim + sdp_node = getCustomOp(parent_model.graph.node[2]) + sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w1a2_ipstitch_npysim.onnx") + ret_npysim = execute_onnx(parent_model, {iname: x}, True) + y_npysim = ret_npysim[oname] + # produce results with node-by-node rtlsim + sdp_node.set_nodeattr( + "model", build_dir + "/end2end_tfc_w1a2_ipstitch_nodebynode_rtlsim.onnx" + ) + ret_nodebynode_rtlsim = execute_onnx(parent_model, {iname: x}, True) + y_nodebynode_rtlsim = ret_nodebynode_rtlsim[oname] + # produce results with whole-network (stitched ip) rtlsim + sdp_node.set_nodeattr( + "model", build_dir + "/end2end_tfc_w1a2_ipstitch_whole_rtlsim.onnx" + ) + ret_whole_rtlsim = execute_onnx(parent_model, {iname: x}, True) + y_whole_rtlsim = ret_whole_rtlsim[oname] + assert np.isclose(y_golden, y_npysim).all() + assert np.isclose(y_golden, y_nodebynode_rtlsim).all() + assert np.isclose(y_golden, y_whole_rtlsim).all() + + +def test_end2end_tfc_w1a2_make_pynq_proj(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_ipstitch.onnx") + model = model.transform(MakePYNQProject(test_pynq_board)) + model.save(build_dir + "/end2end_tfc_w1a2_pynq_project.onnx") + + +def test_end2end_tfc_w1a2_synth_pynq_project(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_pynq_project.onnx") + model = model.transform(SynthPYNQProject()) + model.save(build_dir + "/end2end_tfc_w1a2_synth.onnx") + + +def test_end2end_tfc_w1a2_make_driver(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_synth.onnx") + model = model.transform(MakePYNQDriver()) + model.save(build_dir + "/end2end_tfc_w1a2_pynq_driver.onnx") + + +def test_end2end_tfc_w1a2_deploy_on_pynq(): + model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_pynq_driver.onnx") + try: + ip = os.environ["PYNQ_IP"] # no fault for this one; skip if not defined + if ip == "": + pytest.skip("PYNQ board IP address not specified") + username = os.getenv("PYNQ_USERNAME", 
"xilinx") + password = os.getenv("PYNQ_PASSWORD", "xilinx") + target_dir = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn") + model = model.transform(DeployToPYNQ(ip, username, password, target_dir)) + # save the model to be able to link it to the parent + model.save(build_dir + "/end2end_tfc_w1a2_pynq_deploy.onnx") + except KeyError: + pytest.skip("PYNQ board IP address not specified") + + +def test_end2end_tfc_w1a2_run_on_pynq(): + # use the streamlined model as the "golden" model for right answers + golden = ModelWrapper(build_dir + "/end2end_tfc_w1a2_streamlined.onnx") + iname = golden.graph.input[0].name + oname = golden.graph.output[0].name + raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") + input_tensor = onnx.load_tensor_from_string(raw_i) + x = nph.to_array(input_tensor) + # x = np.zeros(ishape, dtype=np.float32) + # run using FINN-based execution + ret_golden = execute_onnx(golden, {iname: x}, True) + y_golden = ret_golden[oname] + # set up parent+child graph to test + # we'll use models from the previous step as the child model + parent_model = ModelWrapper(build_dir + "/end2end_tfc_w1a2_dataflow_parent.onnx") + iname = parent_model.graph.input[0].name + oname = parent_model.graph.output[0].name + try: + ip = os.environ["PYNQ_IP"] # NOQA + if ip == "": + pytest.skip("PYNQ board IP address not specified") + # produce results with npysim + sdp_node = getCustomOp(parent_model.graph.node[2]) + sdp_node.set_nodeattr("model", build_dir + "/end2end_tfc_w1a2_pynq_deploy.onnx") + ret = execute_onnx(parent_model, {iname: x}, True) + y = ret[oname] + assert np.isclose(y, y_golden).all() + + except KeyError: + pytest.skip("PYNQ board IP address not specified") diff --git a/tests/fpgadataflow/test_code_gen_trafo.py b/tests/fpgadataflow/test_code_gen_trafo.py index 533710605e2bc514ba0fb0c8784c378d07451951..a9b9cd84de77d3393ff76760528ea53780482bfb 100644 --- a/tests/fpgadataflow/test_code_gen_trafo.py +++ 
b/tests/fpgadataflow/test_code_gen_trafo.py @@ -1,12 +1,39 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ import os from onnx import TensorProto, helper -import finn.core.utils as util +import finn.util.basic as util from finn.core.datatype import DataType from finn.core.modelwrapper import ModelWrapper -from finn.transformation.fpgadataflow.cleanup import CleanUp -from finn.transformation.fpgadataflow.codegen import CodeGen +from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim def test_code_gen_trafo(): @@ -50,9 +77,9 @@ def test_code_gen_trafo(): W = util.gen_finn_dt_tensor(wdt, (mw, mh)) model.set_initializer("weights", W) - model = model.transform(CodeGen()) + model = model.transform(CodeGen_npysim()) for node in model.graph.node: - code_gen_attribute = util.get_by_name(node.attribute, "code_gen_dir") + code_gen_attribute = util.get_by_name(node.attribute, "code_gen_dir_npysim") tmp_dir = code_gen_attribute.s.decode("UTF-8") assert os.path.isdir( tmp_dir @@ -66,4 +93,3 @@ def test_code_gen_trafo(): op type {} is empty!""".format( node.op_type ) - model = model.transform(CleanUp()) diff --git a/tests/fpgadataflow/test_compilation_trafo.py b/tests/fpgadataflow/test_compilation_trafo.py index f84ce34b54b3496f7e277e55ac574124e09c25d3..7a50888545b7c464d187f77122f89b255e9063da 100644 --- a/tests/fpgadataflow/test_compilation_trafo.py +++ b/tests/fpgadataflow/test_compilation_trafo.py @@ -1,12 +1,39 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import os from onnx import TensorProto, helper -import finn.core.utils as util +import finn.util.basic as util from finn.core.datatype import DataType from finn.core.modelwrapper import ModelWrapper -from finn.transformation.fpgadataflow.cleanup import CleanUp -from finn.transformation.fpgadataflow.codegen import CodeGen +from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim from finn.transformation.fpgadataflow.compile import Compile @@ -51,7 +78,7 @@ def test_compilation_trafo(): W = util.gen_finn_dt_tensor(wdt, (mw, mh)) model.set_initializer("weights", W) - model = model.transform(CodeGen()) + model = model.transform(CodeGen_npysim()) model = model.transform(Compile()) for node in model.graph.node: compilation_attribute = util.get_by_name(node.attribute, "executable_path") @@ -63,4 +90,3 @@ def test_compilation_trafo(): op type {} does not exist!""".format( node.op_type ) - model = model.transform(CleanUp()) diff --git a/tests/fpgadataflow/test_convert_to_hls_layers.py 
b/tests/fpgadataflow/test_convert_to_hls_layers.py index 32792e1364229199286a7012105442f3bbfb05df..cc9b9515b46ca9d51045560b70baaab4fe9f66f3 100644 --- a/tests/fpgadataflow/test_convert_to_hls_layers.py +++ b/tests/fpgadataflow/test_convert_to_hls_layers.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ import os from pkgutil import get_data @@ -6,34 +34,29 @@ import numpy as np import onnx import onnx.numpy_helper as nph import torch -from models.LFC import LFC import finn.core.onnx_exec as oxe import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls import finn.transformation.streamline.absorb as absorb from finn.core.modelwrapper import ModelWrapper -from finn.custom_op.fpgadataflow.streamingfclayer_batch import StreamingFCLayer_Batch +from finn.custom_op.registry import getCustomOp from finn.transformation.bipolar_to_xnor import ConvertBipolarMatMulToXnorPopcount from finn.transformation.fold_constants import FoldConstants -from finn.transformation.fpgadataflow.codegen import CodeGen +from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim from finn.transformation.fpgadataflow.compile import Compile +from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames from finn.transformation.infer_shapes import InferShapes from finn.transformation.streamline import Streamline from finn.transformation.streamline.round_thresholds import RoundAndClipThresholds +from finn.util.test import get_test_model_trained -export_onnx_path = "test_output_lfc.onnx" -# TODO get from config instead, hardcoded to Docker path for now -trained_lfc_checkpoint = ( - "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar" -) +export_onnx_path = "test_output_tfc.onnx" -def test_convert_to_hls_layers_lfc_w1a1(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) - checkpoint = torch.load(trained_lfc_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) - bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) +def test_convert_to_hls_layers_tfc_w1a1(): + tfc = get_test_model_trained("TFC", 1, 1) + bo.export_finn_onnx(tfc, (1, 1, 28, 28), export_onnx_path) model = ModelWrapper(export_onnx_path) model 
= model.transform(InferShapes()) model = model.transform(FoldConstants()) @@ -48,42 +71,43 @@ def test_convert_to_hls_layers_lfc_w1a1(): fc0 = model.graph.node[2] assert fc0.op_type == "StreamingFCLayer_Batch" assert model.get_tensor_shape(fc0.input[0]) == [1, 784] - assert model.get_tensor_shape(fc0.input[1]) == [784, 1024] - assert model.get_tensor_shape(fc0.input[2]) == [1024, 1] + assert model.get_tensor_shape(fc0.input[1]) == [784, 64] + assert model.get_tensor_shape(fc0.input[2]) == [64, 1] fc1 = model.graph.node[3] assert fc1.op_type == "StreamingFCLayer_Batch" - assert model.get_tensor_shape(fc1.input[0]) == [1, 1024] - assert model.get_tensor_shape(fc1.input[1]) == [1024, 1024] - assert model.get_tensor_shape(fc1.input[2]) == [1024, 1] + assert model.get_tensor_shape(fc1.input[0]) == [1, 64] + assert model.get_tensor_shape(fc1.input[1]) == [64, 64] + assert model.get_tensor_shape(fc1.input[2]) == [64, 1] fc2 = model.graph.node[4] assert fc2.op_type == "StreamingFCLayer_Batch" - assert model.get_tensor_shape(fc2.input[0]) == [1, 1024] - assert model.get_tensor_shape(fc2.input[1]) == [1024, 1024] - assert model.get_tensor_shape(fc2.input[2]) == [1024, 1] + assert model.get_tensor_shape(fc2.input[0]) == [1, 64] + assert model.get_tensor_shape(fc2.input[1]) == [64, 64] + assert model.get_tensor_shape(fc2.input[2]) == [64, 1] fc3 = model.graph.node[5] assert fc3.op_type == "StreamingFCLayer_Batch" - assert model.get_tensor_shape(fc3.input[0]) == [1, 1024] - assert model.get_tensor_shape(fc3.input[1]) == [1024, 10] + assert model.get_tensor_shape(fc3.input[0]) == [1, 64] + assert model.get_tensor_shape(fc3.input[1]) == [64, 10] os.remove(export_onnx_path) - fc0w = StreamingFCLayer_Batch(fc0) + fc0w = getCustomOp(fc0) fc0w.set_nodeattr("SIMD", 784) - fc0w.set_nodeattr("PE", 32) + fc0w.set_nodeattr("PE", 16) - fc1w = StreamingFCLayer_Batch(fc1) - fc1w.set_nodeattr("SIMD", 1024) - fc1w.set_nodeattr("PE", 32) + fc1w = getCustomOp(fc1) + fc1w.set_nodeattr("SIMD", 16) 
+ fc1w.set_nodeattr("PE", 16) - fc2w = StreamingFCLayer_Batch(fc2) - fc2w.set_nodeattr("SIMD", 1024) - fc2w.set_nodeattr("PE", 32) + fc2w = getCustomOp(fc2) + fc2w.set_nodeattr("SIMD", 16) + fc2w.set_nodeattr("PE", 16) - fc3w = StreamingFCLayer_Batch(fc3) - fc3w.set_nodeattr("SIMD", 1024) + fc3w = getCustomOp(fc3) + fc3w.set_nodeattr("SIMD", 16) fc3w.set_nodeattr("PE", 10) - model = model.transform(CodeGen()) + model = model.transform(CodeGen_npysim()) model = model.transform(Compile()) + model = model.transform(SetExecMode("npysim")) raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") input_tensor = onnx.load_tensor_from_string(raw_i) @@ -95,5 +119,72 @@ def test_convert_to_hls_layers_lfc_w1a1(): input_tensor = torch.from_numpy(nph.to_array(input_tensor)).float() assert input_tensor.shape == (1, 1, 28, 28) # do forward pass in PyTorch/Brevitas - expected = lfc.forward(input_tensor).detach().numpy() + expected = tfc.forward(input_tensor).detach().numpy() assert np.isclose(produced, expected, atol=1e-3).all() + + +def test_convert_to_hls_layers_tfc_w1a2(): + tfc = get_test_model_trained("TFC", 1, 2) + bo.export_finn_onnx(tfc, (1, 1, 28, 28), export_onnx_path) + model = ModelWrapper(export_onnx_path) + model = model.transform(InferShapes()) + model = model.transform(FoldConstants()) + model = model.transform(GiveUniqueNodeNames()) + model = model.transform(GiveReadableTensorNames()) + model = model.transform(Streamline()) + from finn.transformation.fpgadataflow.convert_to_hls_layers import ( + InferQuantizedStreamingFCLayer, + ) + + model = model.transform(InferQuantizedStreamingFCLayer()) + + fc0 = model.graph.node[2] + assert fc0.op_type == "StreamingFCLayer_Batch" + assert model.get_tensor_shape(fc0.input[0]) == [1, 784] + assert model.get_tensor_shape(fc0.input[1]) == [784, 64] + assert model.get_tensor_shape(fc0.input[2]) == [64, 2] + fc1 = model.graph.node[3] + assert fc1.op_type == "StreamingFCLayer_Batch" + assert 
model.get_tensor_shape(fc1.input[0]) == [1, 64] + assert model.get_tensor_shape(fc1.input[1]) == [64, 64] + assert model.get_tensor_shape(fc1.input[2]) == [64, 2] + fc2 = model.graph.node[4] + assert fc2.op_type == "StreamingFCLayer_Batch" + assert model.get_tensor_shape(fc2.input[0]) == [1, 64] + assert model.get_tensor_shape(fc2.input[1]) == [64, 64] + assert model.get_tensor_shape(fc2.input[2]) == [64, 2] + fc3 = model.graph.node[5] + assert fc3.op_type == "StreamingFCLayer_Batch" + assert model.get_tensor_shape(fc3.input[0]) == [1, 64] + assert model.get_tensor_shape(fc3.input[1]) == [64, 10] + fc0w = getCustomOp(fc0) + fc0w.set_nodeattr("SIMD", 784) + fc0w.set_nodeattr("PE", 16) + fc1w = getCustomOp(fc1) + fc1w.set_nodeattr("SIMD", 16) + fc1w.set_nodeattr("PE", 16) + fc2w = getCustomOp(fc2) + fc2w.set_nodeattr("SIMD", 16) + fc2w.set_nodeattr("PE", 16) + fc3w = getCustomOp(fc3) + fc3w.set_nodeattr("SIMD", 16) + fc3w.set_nodeattr("PE", 10) + model = model.transform(CodeGen_npysim()) + model = model.transform(Compile()) + model = model.transform(SetExecMode("npysim")) + raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") + input_tensor = onnx.load_tensor_from_string(raw_i) + # run using FINN-based execution + input_dict = {"global_in": nph.to_array(input_tensor)} + output_dict = oxe.execute_onnx(model, input_dict, True) + produced = output_dict[model.graph.output[0].name] + model = ModelWrapper(export_onnx_path) + model = model.transform(InferShapes()) + model = model.transform(FoldConstants()) + model = model.transform(GiveUniqueNodeNames()) + model = model.transform(GiveReadableTensorNames()) + model = model.transform(Streamline()) + golden_output_dict = oxe.execute_onnx(model, input_dict, True) + expected = golden_output_dict[model.graph.output[0].name] + assert np.isclose(produced, expected, atol=1e-3).all() + os.remove(export_onnx_path) diff --git a/tests/fpgadataflow/test_create_dataflow_partition.py 
b/tests/fpgadataflow/test_create_dataflow_partition.py new file mode 100644 index 0000000000000000000000000000000000000000..77e0ddeebf6080e1840d6014978a4c9b4a10b5c1 --- /dev/null +++ b/tests/fpgadataflow/test_create_dataflow_partition.py @@ -0,0 +1,74 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
@pytest.mark.dependency(depends=["test_dataflow_partition_create"])
def test_dataflow_partition_tlastmarker():
    """Insert a TLastMarker node into the child dataflow model produced by
    test_dataflow_partition_create and verify its node attributes; finally
    re-apply the transform to check that a second pass is tolerated."""
    # load the parent model saved by the depended-on test
    parent_model = ModelWrapper(build_dir + "/test_dataflow_partition_create.onnx")
    # node[2] holds the StreamingDataflowPartition pointing at the child model
    child_model_path = getCustomOp(parent_model.graph.node[2]).get_nodeattr("model")
    model = ModelWrapper(child_model_path)
    model = model.transform(InsertTLastMarker())
    last_node = model.graph.node[-1]
    assert last_node.op_type == "TLastMarker"
    assert last_node.domain == "finn"
    marker = getCustomOp(last_node)
    assert marker.get_nodeattr("NumIters") == 1
    assert marker.get_nodeattr("StreamWidth") == 320
    assert marker.get_nodeattr("ElemWidth") == 32
    model.save(build_dir + "/test_dataflow_partition_tlastmarker.onnx")
    # a second insertion pass must still yield a savable model
    model = model.transform(InsertTLastMarker())
    model.save(build_dir + "/test_dataflow_partition_tlastmarker2.onnx")
cb58f1ef541f24ae0e232c3808cf98022fbc6c64..d2740feb662faddf0286e46688cbfc48c36c6bd0 100644 --- a/tests/fpgadataflow/test_fpgadataflow_convinputgenerator.py +++ b/tests/fpgadataflow/test_fpgadataflow_convinputgenerator.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ import pytest import numpy as np @@ -6,10 +34,13 @@ from onnx import TensorProto, helper import finn.core.onnx_exec as oxe from finn.core.datatype import DataType from finn.core.modelwrapper import ModelWrapper -from finn.core.utils import gen_finn_dt_tensor -from finn.transformation.fpgadataflow.cleanup import CleanUp -from finn.transformation.fpgadataflow.codegen import CodeGen +from finn.transformation.fpgadataflow.codegen_ipgen import CodeGen_ipgen +from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim from finn.transformation.fpgadataflow.compile import Compile +from finn.transformation.fpgadataflow.hlssynth_ipgen import HLSSynth_IPGen +from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode +from finn.transformation.general import GiveUniqueNodeNames +from finn.util.basic import gen_finn_dt_tensor def get_im2col_indices(x_shape, k, stride): @@ -39,7 +70,6 @@ def im2col_indices(x, k, stride): cols = x[:, l, i, j] C = x.shape[1] - # cols = cols.transpose(0, 2, 1) cols = cols.transpose(1, 2, 0).reshape(k * k * C, -1) cols = cols.transpose(1, 0) @@ -131,21 +161,19 @@ def prepare_inputs(input_tensor, idt): # input dimension @pytest.mark.parametrize("ifm_dim", [4, 6, 8]) # input channels -@pytest.mark.parametrize("ifm_ch", [1, 2, 3, 4]) +@pytest.mark.parametrize("ifm_ch", [1]) # , 2, 3, 4]) # Stride @pytest.mark.parametrize("stride", [1, 2]) def test_fpgadataflow_slidingwindow(idt, k, ifm_dim, ifm_ch, stride): simd = ifm_ch - ofm_dim = int(((ifm_dim - k) / stride) + 1) x = gen_finn_dt_tensor(idt, (1, ifm_ch, ifm_dim, ifm_dim)) - # x_values = np model = make_single_slidingwindow_modelwrapper( k, ifm_ch, ifm_dim, ofm_dim, simd, stride, idt ) - - model = model.transform(CodeGen()) + model = model.transform(SetExecMode("npysim")) + model = model.transform(CodeGen_npysim()) model = model.transform(Compile()) # prepare input data @@ -158,5 +186,11 @@ def test_fpgadataflow_slidingwindow(idt, k, ifm_dim, ifm_ch, stride): oshape = 
y_produced.shape y_expected = y_expected.reshape(oshape) - assert (y_produced == y_expected).all() - model = model.transform(CleanUp()) + assert (y_produced == y_expected).all(), "npysim failed" + + model = model.transform(SetExecMode("rtlsim")) + model = model.transform(GiveUniqueNodeNames()) + model = model.transform(CodeGen_ipgen("xc7z020clg400-1", 5)) + model = model.transform(HLSSynth_IPGen()) + y_produced = oxe.execute_onnx(model, input_dict)["outp"] + assert (y_produced == y_expected).all(), "rtlsim failed" diff --git a/tests/fpgadataflow/test_fpgadataflow_fclayer.py b/tests/fpgadataflow/test_fpgadataflow_fclayer.py index 0df66c4af2cfbadead8e95322c433cf69d4d2715..93bf0750776331af236154d8f2a005913cbb1c33 100644 --- a/tests/fpgadataflow/test_fpgadataflow_fclayer.py +++ b/tests/fpgadataflow/test_fpgadataflow_fclayer.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import pytest import numpy as np @@ -5,13 +33,17 @@ from onnx import TensorProto, helper import finn.core.onnx_exec as oxe import finn.custom_op.xnorpopcount as xp +from finn.analysis.fpgadataflow.hls_synth_res_estimation import hls_synth_res_estimation from finn.core.datatype import DataType from finn.core.modelwrapper import ModelWrapper -from finn.core.utils import calculate_signed_dot_prod_range, gen_finn_dt_tensor from finn.custom_op.multithreshold import multithreshold -from finn.transformation.fpgadataflow.cleanup import CleanUp -from finn.transformation.fpgadataflow.codegen import CodeGen +from finn.transformation.fpgadataflow.codegen_ipgen import CodeGen_ipgen +from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim from finn.transformation.fpgadataflow.compile import Compile +from finn.transformation.fpgadataflow.hlssynth_ipgen import HLSSynth_IPGen +from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode +from finn.transformation.general import GiveUniqueNodeNames +from finn.util.basic import calculate_signed_dot_prod_range, gen_finn_dt_tensor def make_single_fclayer_modelwrapper(W, pe, simd, wdt, idt, odt, T=None, tdt=None): @@ -110,7 +142,7 @@ def prepare_inputs(input_tensor, idt, wdt): @pytest.mark.parametrize("mw", [4]) # HLS matrix height (output features) @pytest.mark.parametrize("mh", [4]) -def test_fpgadataflow_fclayer(idt, wdt, act, nf, sf, mw, mh): +def 
# activation: None or DataType
@pytest.mark.parametrize("act", [None, DataType.BIPOLAR, DataType.INT2])
# weight datatype
@pytest.mark.parametrize("wdt", [DataType.BIPOLAR, DataType.INT2])
# input datatype
@pytest.mark.parametrize("idt", [DataType.BIPOLAR, DataType.INT2])
# neuron folding, -1 is maximum possible
@pytest.mark.parametrize("nf", [-1, 2, 1])
# synapse folding, -1 is maximum possible
@pytest.mark.parametrize("sf", [-1, 2, 1])
# HLS matrix width (input features)
@pytest.mark.parametrize("mw", [4])
# HLS matrix height (output features)
@pytest.mark.parametrize("mh", [4])
def test_fpgadataflow_fclayer_rtlsim(idt, wdt, act, nf, sf, mw, mh):
    """Build a single StreamingFCLayer_Batch model, run it through HLS IP
    generation and rtlsim, and compare against a numpy golden reference.
    Also checks that the HLS synthesis resource report names the node."""
    # resolve the "maximum folding" placeholders
    if nf == -1:
        nf = mh
    if sf == -1:
        sf = mw
    pe = mh // nf
    simd = mw // sf
    assert mh % pe == 0
    assert mw % sf == 0
    # random weights and one input vector in the requested datatypes
    W = gen_finn_dt_tensor(wdt, (mw, mh))
    x = gen_finn_dt_tensor(idt, (1, mw))
    bipolar_case = wdt == DataType.BIPOLAR and idt == DataType.BIPOLAR
    if act is None:
        # no activation: the layer emits raw accumulator values
        T = None
        tdt = None
        odt = DataType.UINT32 if bipolar_case else DataType.INT32
    else:
        odt = act
        (acc_min, acc_max) = calculate_signed_dot_prod_range(idt, wdt, mw)
        n_steps = act.get_num_possible_values() - 1
        T = np.random.randint(acc_min, acc_max - 1, (mh, n_steps)).astype(np.float32)
        # thresholds must be non-decreasing along each row
        T = np.sort(T, axis=1)
        if bipolar_case:
            tdt = DataType.UINT32
            # bias thresholds into the non-negative popcount domain
            T = np.ceil((T + mw) / 2)
            assert (T >= 0).all()
        else:
            tdt = DataType.INT32
    model = make_single_fclayer_modelwrapper(W, pe, simd, wdt, idt, odt, T, tdt)
    input_dict = prepare_inputs(x, idt, wdt)
    # golden reference computed in numpy
    if bipolar_case:
        # bipolar x bipolar maps to xnorpopcount on {0,1} encodings
        y = xp.xnorpopcountmatmul((x + 1) / 2, (W + 1) / 2)
    else:
        y = np.matmul(x, W)
    if T is not None:
        y = multithreshold(y, T)
        if act == DataType.BIPOLAR:
            # binary to bipolar
            y = 2 * y - 1
        else:
            # signed offset
            y += act.min()
    y_expected = y.reshape(model.get_tensor_shape("outp"))
    # TODO split up into several dependent tests -- need to check how this
    # works for parametrized tests...
    model = model.transform(SetExecMode("rtlsim"))
    model = model.transform(GiveUniqueNodeNames())
    model = model.transform(CodeGen_ipgen("xc7z020clg400-1", 5))
    model = model.transform(HLSSynth_IPGen())
    y_produced = oxe.execute_onnx(model, input_dict)["outp"]
    assert (y_produced.reshape(y_expected.shape) == y_expected).all(), "rtlsim failed"
    # the synthesis estimate report must mention the uniquely named node
    res_report = model.analysis(hls_synth_res_estimation)
    assert "StreamingFCLayer_Batch_0" in res_report
def create_one_fc_model():
    """Build a one-layer StreamingFCLayer_Batch model with no activation and
    wrap it in a StreamingDataflowPartition.

    The layer has identity weights and a full INT32 accumulator output,
    which makes debugging the hardware flow easier.
    """
    wdt = DataType.INT2
    idt = DataType.INT2
    odt = DataType.INT32
    m = 4

    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, m])
    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, m])

    fc_node = helper.make_node(
        "StreamingFCLayer_Batch",
        ["inp", "w0"],
        ["outp"],
        domain="finn",
        backend="fpgadataflow",
        resType="ap_resource_lut()",
        MW=m,
        MH=m,
        SIMD=2,
        PE=2,
        inputDataType=idt.name,
        weightDataType=wdt.name,
        outputDataType=odt.name,
        ActVal=0,
        binaryXnorMode=0,
        noActivation=1,
    )

    graph = helper.make_graph(
        nodes=[fc_node], name="fclayer_graph", inputs=[inp], outputs=[outp]
    )
    model = ModelWrapper(helper.make_model(graph, producer_name="fclayer-model"))

    model.set_tensor_datatype("inp", idt)
    model.set_tensor_datatype("outp", odt)
    model.set_tensor_datatype("w0", wdt)
    # identity weights: with no activation the layer passes inputs through
    model.set_initializer("w0", np.eye(m, dtype=np.float32))

    # split the HLS layer off into a StreamingDataflowPartition
    model = model.transform(CreateDataflowPartition())
    return model
def test_fpgadataflow_ipstitch_gen_model():
    """Create the single-fclayer dataflow model, run HLS IP generation on it
    and save the result for the subsequent ip-stitching tests."""
    model = create_one_fc_model()
    if model.graph.node[0].op_type == "StreamingDataflowPartition":
        partition = getCustomOp(model.graph.node[0])
        assert partition.__class__.__name__ == "StreamingDataflowPartition"
        # the partition node points at a separate child model file
        child_model_file = partition.get_nodeattr("model")
        assert os.path.isfile(child_model_file)
        model = ModelWrapper(child_model_file)
        model.set_metadata_prop("exec_mode", "remote_pynq")
    # prepare and synthesize the HLS IP for every node
    for transform in [
        InsertTLastMarker(),
        GiveUniqueNodeNames(),
        CodeGen_ipgen(test_fpga_part, 5),
        HLSSynth_IPGen(),
    ]:
        model = model.transform(transform)
    assert model.graph.node[0].op_type == "StreamingFCLayer_Batch"
    assert model.graph.node[-1].op_type == "TLastMarker"
    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_gen_model.onnx")
def test_fpgadataflow_ipstitch_pynq_driver():
    """Generate the PYNQ Python driver for the stitched design and verify
    that the driver directory was created on disk."""
    model = ModelWrapper(ip_stitch_model_dir + "/test_fpgadataflow_pynq_projgen.onnx")
    model = model.transform(MakePYNQDriver())
    driver_dir = model.get_metadata_prop("pynq_driver_dir")
    # the transform must record where it emitted the driver
    assert driver_dir is not None
    assert os.path.isdir(driver_dir)
    model.save(ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_pynq_driver.onnx")
def test_fpgadataflow_ipstitch_remote_execution():
    """Execute the deployed accelerator on a remote PYNQ board and check
    that the output equals the input (the layer uses identity weights).

    Skipped when no board is configured via the PYNQ_IP environment
    variable (unset or empty).
    """
    # Resolve the board address up front instead of wrapping the whole body
    # in try/except KeyError: previously a genuine KeyError raised inside
    # execute_onnx would silently skip the test rather than fail it.
    ip = os.getenv("PYNQ_IP", "")
    if ip == "":
        pytest.skip("PYNQ board IP address not specified")
    model = ModelWrapper(
        ip_stitch_model_dir + "/test_fpgadataflow_ipstitch_pynq_deployment.onnx"
    )
    idt = DataType.INT2
    x = gen_finn_dt_tensor(idt, (1, 4))
    input_dict = {"inp": x}
    outp = execute_onnx(model, input_dict)
    assert np.isclose(outp["outp"], x).all()
def check_two_dict_for_equality(dict1, dict2):
    """Assert that dict1 and dict2 contain exactly the same key/value pairs.

    Returns True when the dictionaries are equal; raises AssertionError with
    a descriptive message on the first difference found.
    """
    # Check keys in both directions: a one-way scan over dict1 would let
    # keys that exist only in dict2 pass undetected.
    for key in dict1:
        assert key in dict2, "Key: {} is not in both dictionaries".format(key)
    for key in dict2:
        assert key in dict1, "Key: {} is not in both dictionaries".format(key)
    for key in dict1:
        assert (
            dict1[key] == dict2[key]
        ), """Values for key {} are not the same
            in both dictionaries""".format(
            key
        )

    return True
binaryXnorMode=0, + noActivation=0, + ) + graph = helper.make_graph( + nodes=[FCLayer_node], name="fclayer_graph", inputs=[inp], outputs=[outp] + ) + + model = helper.make_model(graph, producer_name="fclayer-model") + model = ModelWrapper(model) + + model.set_tensor_datatype("inp", idt) + model.set_tensor_datatype("outp", odt) + model.set_tensor_datatype("weights", wdt) + + model = model.transform(GiveUniqueNodeNames()) + prod_resource_estimation = model.analysis(res_estimation) + expect_resource_estimation = { + "StreamingFCLayer_Batch_0": ["BRAMs: 1", "LUTs: 304.4"] + } + + assert check_two_dict_for_equality( + prod_resource_estimation, expect_resource_estimation + ), """The produced output of + the resource estimation analysis pass is not equal to the expected one""" diff --git a/tests/fpgadataflow/test_layer_streaming_maxpool_batch.py b/tests/fpgadataflow/test_layer_streaming_maxpool_batch.py index 4a7ca1b5c0473c520f0e2ea775f7c8950eb16695..67bb38640a4004569e43876d525eb0952e784128 100644 --- a/tests/fpgadataflow/test_layer_streaming_maxpool_batch.py +++ b/tests/fpgadataflow/test_layer_streaming_maxpool_batch.py @@ -1,12 +1,40 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import TensorProto, helper import finn.core.onnx_exec as oxe from finn.core.datatype import DataType from finn.core.modelwrapper import ModelWrapper -from finn.transformation.fpgadataflow.cleanup import CleanUp -from finn.transformation.fpgadataflow.codegen import CodeGen +from finn.transformation.fpgadataflow.codegen_npysim import CodeGen_npysim from finn.transformation.fpgadataflow.compile import Compile +from finn.transformation.fpgadataflow.set_exec_mode import SetExecMode def test_layer_streaming_maxpool_batch(): @@ -110,12 +138,10 @@ def test_layer_streaming_maxpool_batch(): ], dtype=np.float32, ).reshape(2, 2, 4, 4) - print(input_tensor) - model = model.transform(CodeGen()) + model = model.transform(SetExecMode("npysim")) + model = model.transform(CodeGen_npysim()) model = model.transform(Compile()) input_dict = {"in": input_tensor} - output_dict = oxe.execute_onnx(model, input_dict) - print(output_dict) - model = model.transform(CleanUp()) + output_dict = oxe.execute_onnx(model, input_dict) # NOQA diff --git a/tests/fpgadataflow/test_npy2hls.py b/tests/fpgadataflow/test_npy2hls.py deleted file mode 100644 index 
47b4eb4103b83664e83c10bcc7f867996e52a5e4..0000000000000000000000000000000000000000 --- a/tests/fpgadataflow/test_npy2hls.py +++ /dev/null @@ -1,135 +0,0 @@ -import shutil -import subprocess -import tempfile as tmp - -import numpy as np - -import finn.core.utils as cutil -from finn.backend.fpgadataflow.utils import numpy_to_hls_code -from finn.core.datatype import DataType - - -def make_npy2apintstream_testcase(ndarray, dtype): - test_dir = tmp.mkdtemp(prefix="test_npy2apintstream_") - shape = ndarray.shape - elem_bits = dtype.bitwidth() - packed_bits = shape[-1] * elem_bits - packed_hls_type = "ap_uint<%d>" % packed_bits - elem_hls_type = dtype.get_hls_datatype_str() - npy_in = test_dir + "/in.npy" - npy_out = test_dir + "/out.npy" - # restrict the np datatypes we can handle - npyt_to_ct = { - "float32": "float", - "float64": "double", - "int8": "int8_t", - "int32": "int32_t", - "int64": "int64_t", - "uint8": "uint8_t", - "uint32": "uint32_t", - "uint64": "uint64_t", - } - npy_type = npyt_to_ct[str(ndarray.dtype)] - shape_cpp_str = str(shape).replace("(", "{").replace(")", "}") - test_app_string = [] - test_app_string += ["#include <cstddef>"] - test_app_string += ['#include "ap_int.h"'] - test_app_string += ['#include "stdint.h"'] - test_app_string += ['#include "hls_stream.h"'] - test_app_string += ['#include "cnpy.h"'] - test_app_string += ['#include "npy2apintstream.hpp"'] - test_app_string += ["int main(int argc, char *argv[]) {"] - test_app_string += ["hls::stream<%s> teststream;" % packed_hls_type] - test_app_string += [ - 'npy2apintstream<%s, %s, %d, %s>("%s", teststream);' - % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in) - ] - test_app_string += [ - 'apintstream2npy<%s, %s, %d, %s>(teststream, %s, "%s");' - % (packed_hls_type, elem_hls_type, elem_bits, npy_type, shape_cpp_str, npy_out) - ] - test_app_string += ["return 0;"] - test_app_string += ["}"] - with open(test_dir + "/test.cpp", "w") as f: - f.write("\n".join(test_app_string)) - 
cmd_compile = """ -g++ -o test_npy2apintstream test.cpp /workspace/cnpy/cnpy.cpp \ --I/workspace/cnpy/ -I/workspace/vivado-hlslib -I/workspace/finn/src/finn/data/cpp \ ---std=c++11 -lz""" - with open(test_dir + "/compile.sh", "w") as f: - f.write(cmd_compile) - compile = subprocess.Popen( - ["sh", "compile.sh"], stdout=subprocess.PIPE, cwd=test_dir - ) - (stdout, stderr) = compile.communicate() - np.save(npy_in, ndarray) - execute = subprocess.Popen( - "./test_npy2apintstream", stdout=subprocess.PIPE, cwd=test_dir - ) - (stdout, stderr) = execute.communicate() - produced = np.load(npy_out) - success = (produced == ndarray).all() - # only delete generated code if test has passed - # useful for debug otherwise - if success: - shutil.rmtree(test_dir) - assert success - - -test_shapes = [(1, 2, 4), (1, 1, 64), (2, 64)] - - -def test_npy2apintstream_binary(): - for test_shape in test_shapes: - dt = DataType.BINARY - W = cutil.gen_finn_dt_tensor(dt, test_shape) - make_npy2apintstream_testcase(W, dt) - - -def test_npy2apintstream_int2(): - for test_shape in test_shapes: - dt = DataType.INT2 - W = cutil.gen_finn_dt_tensor(dt, test_shape) - make_npy2apintstream_testcase(W, dt) - - -def test_array2hexstring(): - assert cutil.array2hexstring([1, 1, 1, 0], DataType.BINARY, 4) == "0xe" - assert cutil.array2hexstring([1, 1, 1, 0], DataType.BINARY, 8) == "0x0e" - assert cutil.array2hexstring([1, 1, 1, -1], DataType.BIPOLAR, 8) == "0x0e" - assert cutil.array2hexstring([3, 3, 3, 3], DataType.UINT2, 8) == "0xff" - assert cutil.array2hexstring([1, 3, 3, 1], DataType.UINT2, 8) == "0x7d" - assert cutil.array2hexstring([1, -1, 1, -1], DataType.INT2, 8) == "0x77" - assert cutil.array2hexstring([1, 1, 1, -1], DataType.INT4, 16) == "0x111f" - assert cutil.array2hexstring([-1], DataType.FLOAT32, 32) == "0xbf800000" - assert cutil.array2hexstring([17.125], DataType.FLOAT32, 32) == "0x41890000" - - -def test_pack_innermost_dim_as_hex_string(): - A = [[1, 1, 1, 0], [0, 1, 1, 0]] - eA = 
np.asarray(["0x0e", "0x06"]) - assert (cutil.pack_innermost_dim_as_hex_string(A, DataType.BINARY, 8) == eA).all() - B = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] - eB = np.asarray([["0x0f", "0x0f"], ["0x07", "0x0d"]]) - assert (cutil.pack_innermost_dim_as_hex_string(B, DataType.UINT2, 8) == eB).all() - - -def test_numpy_to_hls_code(): - def remove_all_whitespace(s): - return "".join(s.split()) - - A = [[1, 1, 1, 0], [0, 1, 1, 0]] - ret = numpy_to_hls_code(A, DataType.BINARY, "test", True) - eA = """ap_uint<4> test[2] = - {ap_uint<4>("0xe", 16), ap_uint<4>("0x6", 16)};""" - assert remove_all_whitespace(ret) == remove_all_whitespace(eA) - B = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] - ret = numpy_to_hls_code(B, DataType.UINT2, "test", True) - eB = """ap_uint<4> test[2][2] = - {{ap_uint<4>("0xf", 16), ap_uint<4>("0xf", 16)}, - {ap_uint<4>("0x7", 16), ap_uint<4>("0xd", 16)}};""" - assert remove_all_whitespace(ret) == remove_all_whitespace(eB) - ret = numpy_to_hls_code(B, DataType.UINT2, "test", True, True) - eB = """{{ap_uint<4>("0xf", 16), ap_uint<4>("0xf", 16)}, - {ap_uint<4>("0x7", 16), ap_uint<4>("0xd", 16)}};""" - assert remove_all_whitespace(ret) == remove_all_whitespace(eB) diff --git a/tests/test_basic_onnx_exec.py b/tests/test_basic_onnx_exec.py deleted file mode 100644 index c7b3da1b78385d36fc73790b22336242141d5255..0000000000000000000000000000000000000000 --- a/tests/test_basic_onnx_exec.py +++ /dev/null @@ -1,27 +0,0 @@ -from pkgutil import get_data - -import numpy as np -import onnx -import onnx.numpy_helper as np_helper - -import finn.core.onnx_exec as oxe -from finn.core.modelwrapper import ModelWrapper -from finn.transformation.infer_shapes import InferShapes - - -def test_mnist_onnx_download_extract_run(): - # load the onnx model - raw_m = get_data("finn", "data/onnx/mnist-conv/model.onnx") - model = ModelWrapper(raw_m) - model = model.transform(InferShapes()) - # load one of the test vectors - raw_i = get_data("finn", 
"data/onnx/mnist-conv/test_data_set_0/input_0.pb") - raw_o = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/output_0.pb") - input_tensor = onnx.load_tensor_from_string(raw_i) - output_tensor = onnx.load_tensor_from_string(raw_o) - # run using FINN-based execution - input_dict = {"Input3": np_helper.to_array(input_tensor)} - output_dict = oxe.execute_onnx(model, input_dict) - assert np.isclose( - np_helper.to_array(output_tensor), output_dict["Plus214_Output_0"], atol=1e-3 - ).all() diff --git a/tests/test_brevitas_export.py b/tests/test_brevitas_export.py deleted file mode 100644 index 641e5e3c49b917ad06d649d797e6f9bc9f30f170..0000000000000000000000000000000000000000 --- a/tests/test_brevitas_export.py +++ /dev/null @@ -1,132 +0,0 @@ -import os -from pkgutil import get_data - -import brevitas.onnx as bo -import numpy as np -import onnx -import onnx.numpy_helper as nph -import torch -from models.LFC import LFC - -import finn.core.onnx_exec as oxe -from finn.core.modelwrapper import ModelWrapper -from finn.transformation.fold_constants import FoldConstants -from finn.transformation.infer_shapes import InferShapes - -export_onnx_path = "test_output_lfc.onnx" -# TODO get from config instead, hardcoded to Docker path for now -trained_lfc_w1a1_checkpoint = ( - "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar" -) - -trained_lfc_w1a2_checkpoint = ( - "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W2A/checkpoints/best.tar" -) - - -def test_brevitas_trained_lfc_w1a1_pytorch(): - # load pretrained weights into LFC-w1a1 - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1).eval() - checkpoint = torch.load(trained_lfc_w1a1_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) - # load one of the test vectors - raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") - input_tensor = onnx.load_tensor_from_string(raw_i) - input_tensor = 
torch.from_numpy(nph.to_array(input_tensor)).float() - assert input_tensor.shape == (1, 1, 28, 28) - # do forward pass in PyTorch/Brevitas - produced = lfc.forward(input_tensor).detach().numpy() - expected = [ - [ - 3.3253, - -2.5652, - 9.2157, - -1.4251, - 1.4251, - -3.3728, - 0.2850, - -0.5700, - 7.0781, - -1.2826, - ] - ] - assert np.isclose(produced, expected, atol=1e-4).all() - - -def test_brevitas_trained_lfc_w1a2_pytorch(): - # load pretrained weights into LFC-w1a2 - lfc = LFC(weight_bit_width=1, act_bit_width=2, in_bit_width=2).eval() - checkpoint = torch.load(trained_lfc_w1a2_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) - # load one of the test vectors - raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") - input_tensor = onnx.load_tensor_from_string(raw_i) - input_tensor = torch.from_numpy(nph.to_array(input_tensor)).float() - assert input_tensor.shape == (1, 1, 28, 28) - # do forward pass in PyTorch/Brevitas - produced = lfc.forward(input_tensor).detach().numpy() - expected = [ - [ - 4.598069, - -6.3698025, - 10.75695, - 0.3796571, - 1.4764442, - -5.4417515, - -1.8982856, - -5.610488, - 6.116698, - 0.21092065, - ] - ] - assert np.isclose(produced, expected, atol=1e-4).all() - - -def test_brevitas_to_onnx_export_and_exec_lfc_w1a1(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) - checkpoint = torch.load(trained_lfc_w1a1_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) - bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) - model = ModelWrapper(export_onnx_path) - model = model.transform(InferShapes()) - model = model.transform(FoldConstants()) - # load one of the test vectors - raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") - input_tensor = onnx.load_tensor_from_string(raw_i) - # run using FINN-based execution - input_dict = {"0": nph.to_array(input_tensor)} - output_dict = oxe.execute_onnx(model, input_dict) - 
produced = output_dict[list(output_dict.keys())[0]] - # run using PyTorch/Brevitas - input_tensor = torch.from_numpy(nph.to_array(input_tensor)).float() - assert input_tensor.shape == (1, 1, 28, 28) - # do forward pass in PyTorch/Brevitas - expected = lfc.forward(input_tensor).detach().numpy() - assert np.isclose(produced, expected, atol=1e-3).all() - # remove the downloaded model and extracted files - os.remove(export_onnx_path) - - -def test_brevitas_to_onnx_export_and_exec_lfc_w1a2(): - lfc = LFC(weight_bit_width=1, act_bit_width=2, in_bit_width=2) - checkpoint = torch.load(trained_lfc_w1a2_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) - bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) - model = ModelWrapper(export_onnx_path) - model = model.transform(InferShapes()) - model = model.transform(FoldConstants()) - # load one of the test vectors - raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") - input_tensor = onnx.load_tensor_from_string(raw_i) - # run using FINN-based execution - input_dict = {"0": nph.to_array(input_tensor)} - output_dict = oxe.execute_onnx(model, input_dict) - produced = output_dict[list(output_dict.keys())[0]] - # run using PyTorch/Brevitas - input_tensor = torch.from_numpy(nph.to_array(input_tensor)).float() - assert input_tensor.shape == (1, 1, 28, 28) - # do forward pass in PyTorch/Brevitas - expected = lfc.forward(input_tensor).detach().numpy() - assert np.isclose(produced, expected, atol=1e-3).all() - # remove the downloaded model and extracted files - os.remove(export_onnx_path) diff --git a/tests/test_code_generation.py b/tests/test_code_generation.py deleted file mode 100644 index 0d6a371079d27890aeae5123f194fb8fa0e06210..0000000000000000000000000000000000000000 --- a/tests/test_code_generation.py +++ /dev/null @@ -1,12 +0,0 @@ -from pkgutil import get_data - -import finn.backend.fpgadataflow.code_gen as cg -from finn.core.modelwrapper import ModelWrapper - - -def 
test_code_generation(): - # load the onnx model - raw_m = get_data("finn", "data/onnx/finn-hls-model/finn-hls-onnx-model.onnx") - model = ModelWrapper(raw_m) - code_gen_dict = cg.code_generation(model) - print(code_gen_dict) diff --git a/tests/test_finn_hls_onnx_graph.py b/tests/test_finn_hls_onnx_graph.py deleted file mode 100644 index 06308644473ab57b05ba0f46c3696c857b00e7a6..0000000000000000000000000000000000000000 --- a/tests/test_finn_hls_onnx_graph.py +++ /dev/null @@ -1,128 +0,0 @@ -import onnx -from onnx import TensorProto, helper - -from finn.core.datatype import DataType -from finn.core.modelwrapper import ModelWrapper - - -def test_manually_construct_onnx_graph(): - - inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, 13, 64]) - outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, 1, 64]) - - memInStrm_node = helper.make_node( - "FIFO", ["inp"], ["memInStrm"], "memInStrm", domain='finn', backend='fpgadataflow', depth=1024 - ) - FCLayer0_node = helper.make_node( - "StreamingFCLayer_Batch", - ["memInStrm", "weights0", "thresh0"], - ["out1"], - domain='finn', - backend='fpgadataflow', - resType="ap_resource_lut()", - MW=832, - MH=1024, - SIMD=64, - PE=32, - resDataType="Recast<XnorMul>", - ) - inter0_node = helper.make_node("FIFO", ["out1"], ["inter0"], "inter0", domain='finn', backend='fpgadataflow', depth=16) - FCLayer1_node = helper.make_node( - "StreamingFCLayer_Batch", - ["inter0", "weights1", "thresh1"], - ["out2"], - domain='finn', - backend='fpgadataflow', - resType="ap_resource_lut()", - MW=1024, - MH=1024, - SIMD=32, - PE=64, - resDataType="Recast<XnorMul>", - ) - inter1_node = helper.make_node("FIFO", ["out2"], ["inter1"], "inter1", domain='finn', backend='fpgadataflow', depth=16) - FCLayer2_node = helper.make_node( - "StreamingFCLayer_Batch", - ["inter1", "weights2", "thresh2"], - ["out3"], - domain='finn', - backend='fpgadataflow', - resType="ap_resource_lut()", - MW=1024, - MH=1024, - SIMD=64, - PE=32, - 
resDataType="Recast<XnorMul>", - ) - inter2_node = helper.make_node("FIFO", ["out3"], ["inter2"], "inter2", domain='finn', backend='fpgadataflow', depth=8) - FCLayer3_node = helper.make_node( - "StreamingFCLayer_Batch", - ["inter2", "weights3", "thresh3"], - ["out4"], - domain='finn', - backend='fpgadataflow', - resType="ap_resource_lut()", - MW=1024, - MH=64, - SIMD=8, - PE=16, - resDataType="Recast<XnorMul>", - ) - memOutStrm_node = helper.make_node( - "FIFO", ["out4"], ["outp"], "memOutStrm", domain='finn', backend='fpgadataflow', depth=1024 - ) - - graph = helper.make_graph( - nodes=[ - memInStrm_node, - FCLayer0_node, - inter0_node, - FCLayer1_node, - inter1_node, - FCLayer2_node, - inter2_node, - FCLayer3_node, - memOutStrm_node, - ], - name="finn_hls_onnx_graph", - inputs=[inp], - outputs=[outp], - value_info=[ - helper.make_tensor_value_info("memInStrm", TensorProto.FLOAT, [1, 13, 64]), - helper.make_tensor_value_info("weights0", TensorProto.FLOAT, [64, 32, 416]), - helper.make_tensor_value_info( - "thresh0", TensorProto.FLOAT, [32, 32, 1, 16, 1] - ), - helper.make_tensor_value_info("out1", TensorProto.FLOAT, [1, 32, 32]), - helper.make_tensor_value_info("inter0", TensorProto.FLOAT, [1, 32, 32]), - helper.make_tensor_value_info("weights1", TensorProto.FLOAT, [32, 64, 512]), - helper.make_tensor_value_info( - "thresh1", TensorProto.FLOAT, [16, 64, 1, 16, 1] - ), - helper.make_tensor_value_info("out2", TensorProto.FLOAT, [1, 16, 64]), - helper.make_tensor_value_info("inter1", TensorProto.FLOAT, [1, 16, 64]), - helper.make_tensor_value_info("weights2", TensorProto.FLOAT, [64, 32, 512]), - helper.make_tensor_value_info( - "thresh2", TensorProto.FLOAT, [32, 32, 1, 16, 1] - ), - helper.make_tensor_value_info("out3", TensorProto.FLOAT, [1, 32, 32]), - helper.make_tensor_value_info("inter2", TensorProto.FLOAT, [1, 32, 32]), - helper.make_tensor_value_info("weights3", TensorProto.FLOAT, [8, 16, 512]), - helper.make_tensor_value_info( - "thresh3", TensorProto.FLOAT, 
[4, 16, 1, 16, 1] - ), - helper.make_tensor_value_info("out4", TensorProto.FLOAT, [1, 1, 64]), - ], - ) - model = helper.make_model(graph, producer_name="finn-hls-onnx-model") - model = ModelWrapper(model) - - # set the tensor datatypes (in this case: all to bipolar) - for tensor in graph.input: - model.set_tensor_datatype(tensor.name, DataType["BIPOLAR"]) - for tensor in graph.output: - model.set_tensor_datatype(tensor.name, DataType["BIPOLAR"]) - for tensor in graph.value_info: - model.set_tensor_datatype(tensor.name, DataType["BIPOLAR"]) - - #onnx.save(model.model, "finn-hls-onnx-model.onnx") diff --git a/tests/test_padding.py b/tests/test_padding.py deleted file mode 100644 index 8075b1be8fc2ad442e29ef9ed56b22b178ce66ee..0000000000000000000000000000000000000000 --- a/tests/test_padding.py +++ /dev/null @@ -1,29 +0,0 @@ -import numpy as np - -from finn.core.utils import pad_tensor_to_multiple_of - - -def test_pad_tensor_to_multiple_of(): - A = np.eye(3) - B = pad_tensor_to_multiple_of(A, [2, 2], val=-1) - assert B.shape == (4, 4) - assert (B[:3, :3] == A).all() - assert (B[3, :] == -1).all() - assert (B[:, 3] == -1).all() - B = pad_tensor_to_multiple_of(A, [5, 5], val=-1, distr_pad=True) - assert B.shape == (5, 5) - assert (B[1:4, 1:4] == A).all() - assert (B[0, :] == -1).all() - assert (B[:, 0] == -1).all() - assert (B[4, :] == -1).all() - assert (B[:, 4] == -1).all() - # using -1 in pad_to parameter should give an unpadded dimension - B = pad_tensor_to_multiple_of(A, [-1, 5], val=-1, distr_pad=True) - assert B.shape == (3, 5) - assert (B[:, 1:4] == A).all() - assert (B[:, 0] == -1).all() - assert (B[:, 4] == -1).all() - # if odd number of padding pixels required, 1 more should go after existing - B = pad_tensor_to_multiple_of(A, [6, 6], val=-1, distr_pad=True) - assert B.shape == (6, 6) - assert (B[1:4, 1:4] == A).all() diff --git a/tests/test_set_attribute.py b/tests/test_set_attribute.py deleted file mode 100644 index 
7e56fdbb938441115bf458532560af8ac803b4f9..0000000000000000000000000000000000000000 --- a/tests/test_set_attribute.py +++ /dev/null @@ -1,57 +0,0 @@ -from onnx import TensorProto, helper - -from finn.core.datatype import DataType -from finn.core.modelwrapper import ModelWrapper -from finn.core.utils import get_by_name - - -def test_set_attribute(): - mw = 8 - mh = 8 - pe = 4 - simd = 4 - idt = odt = wdt = DataType.BIPOLAR - - inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, [1, mw]) - outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, [1, mh]) - node_inp_list = ["inp", "weights"] - - FCLayer_node = helper.make_node( - "StreamingFCLayer_Batch", - node_inp_list, - ["outp"], - domain="finn", - backend="", - code_gen_dir="", - executable_path="", - resType="ap_resource_lut()", - MW=0, - MH=mh, - SIMD=simd, - PE=pe, - inputDataType=idt.name, - weightDataType=wdt.name, - outputDataType=odt.name, - ) - graph = helper.make_graph( - nodes=[FCLayer_node], name="fclayer_graph", inputs=[inp], outputs=[outp] - ) - - model = helper.make_model(graph, producer_name="fclayer-model") - model = ModelWrapper(model) - - # test cases - value_to_set = "fpgadataflow" - model.set_attribute(FCLayer_node, "backend", value_to_set) - value = get_by_name(FCLayer_node.attribute, "backend") - assert value.s.decode("UTF-8") == value_to_set - - value_to_set = mw - model.set_attribute(FCLayer_node, "MW", value_to_set) - value = get_by_name(FCLayer_node.attribute, "MW") - assert value.i == value_to_set - - value_to_set = idt.name - model.set_attribute(FCLayer_node, "inputDataType", value_to_set) - value = get_by_name(FCLayer_node.attribute, "inputDataType") - assert value.s.decode("UTF-8") == value_to_set diff --git a/tests/test_shape_utils.py b/tests/test_shape_utils.py deleted file mode 100644 index 09e14bb05a52579af56d65cd0f4ef75c49fd5d81..0000000000000000000000000000000000000000 --- a/tests/test_shape_utils.py +++ /dev/null @@ -1,13 +0,0 @@ -import numpy as np - -import 
finn.core.utils as util - - -def test_interleave_matrix_outer_dim_from_partitions(): - A = np.eye(10) - n_parts = 2 - Ax = util.interleave_matrix_outer_dim_from_partitions(A, n_parts) - part_size = 10 // n_parts - assert Ax.shape == (n_parts, part_size, 10) - for r_ind in range(A.shape[0]): - assert (A[r_ind] == Ax[r_ind % n_parts][r_ind // n_parts]).all() diff --git a/tests/transformation/streamline/test_streamline_fc.py b/tests/transformation/streamline/test_streamline_fc.py new file mode 100644 index 0000000000000000000000000000000000000000..b287ab65265448983fcfe4f7d94d595cd5ac5343 --- /dev/null +++ b/tests/transformation/streamline/test_streamline_fc.py @@ -0,0 +1,74 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from pkgutil import get_data + +import brevitas.onnx as bo +import numpy as np +import onnx +import onnx.numpy_helper as nph +import pytest + +import finn.core.onnx_exec as oxe +from finn.core.modelwrapper import ModelWrapper +from finn.transformation.fold_constants import FoldConstants +from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames +from finn.transformation.infer_shapes import InferShapes +from finn.transformation.streamline import Streamline +from finn.util.test import get_test_model_trained +from finn.util.basic import make_build_dir + +export_onnx_path = make_build_dir("test_streamline_fc_") + +# activation: None or DataType +@pytest.mark.parametrize("size", ["TFC", "SFC", "LFC"]) +# weight bits +@pytest.mark.parametrize("wbits", [1]) +# act bits +@pytest.mark.parametrize("abits", [1, 2]) +def test_streamline_fc(size, wbits, abits): + nname = "%s_%dW%dA" % (size, wbits, abits) + finn_onnx = export_onnx_path + "/%s.onnx" % nname + fc = get_test_model_trained(size, wbits, abits) + bo.export_finn_onnx(fc, (1, 1, 28, 28), finn_onnx) + model = ModelWrapper(finn_onnx) + model = model.transform(InferShapes()) + model = model.transform(FoldConstants()) + model = model.transform(GiveUniqueNodeNames()) + model = model.transform(GiveReadableTensorNames()) + # load one of the test vectors + raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") + input_tensor = 
onnx.load_tensor_from_string(raw_i) + # run using FINN-based execution + input_dict = {"global_in": nph.to_array(input_tensor)} + expected_ctx = oxe.execute_onnx(model, input_dict, True) + expected = expected_ctx[model.graph.output[0].name] + model = model.transform(Streamline()) + produced_ctx = oxe.execute_onnx(model, input_dict, True) + produced = produced_ctx[model.graph.output[0].name] + assert np.isclose(expected, produced, atol=1e-3).all() diff --git a/tests/transformation/streamline/test_streamline_lfc_w1a1.py b/tests/transformation/streamline/test_streamline_lfc_w1a1.py deleted file mode 100644 index 06533a1d97b0734e7fd8a6aa0bf0a2a28548b9de..0000000000000000000000000000000000000000 --- a/tests/transformation/streamline/test_streamline_lfc_w1a1.py +++ /dev/null @@ -1,46 +0,0 @@ -import os -from pkgutil import get_data - -import brevitas.onnx as bo -import numpy as np -import onnx -import onnx.numpy_helper as nph -import torch -from models.LFC import LFC - -import finn.core.onnx_exec as oxe -from finn.core.modelwrapper import ModelWrapper -from finn.transformation.fold_constants import FoldConstants -from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames -from finn.transformation.infer_shapes import InferShapes -from finn.transformation.streamline import Streamline - -export_onnx_path = "test_output_lfc.onnx" -# TODO get from config instead, hardcoded to Docker path for now -trained_lfc_w1a1_checkpoint = ( - "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar" -) - - -def test_streamline_lfc_w1a1(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) - checkpoint = torch.load(trained_lfc_w1a1_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) - bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) - model = ModelWrapper(export_onnx_path) - model = model.transform(InferShapes()) - model = model.transform(FoldConstants()) - model = 
model.transform(GiveUniqueNodeNames()) - model = model.transform(GiveReadableTensorNames()) - # load one of the test vectors - raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") - input_tensor = onnx.load_tensor_from_string(raw_i) - # run using FINN-based execution - input_dict = {"global_in": nph.to_array(input_tensor)} - expected_ctx = oxe.execute_onnx(model, input_dict, True) - expected = expected_ctx[model.graph.output[0].name] - model = model.transform(Streamline()) - produced_ctx = oxe.execute_onnx(model, input_dict, True) - produced = produced_ctx[model.graph.output[0].name] - assert np.isclose(expected, produced, atol=1e-3).all() - os.remove(export_onnx_path) diff --git a/tests/transformation/streamline/test_streamline_lfc_w1a2.py b/tests/transformation/streamline/test_streamline_lfc_w1a2.py deleted file mode 100644 index 9e9d347e85245e740605efea3a03e1792c3ea46b..0000000000000000000000000000000000000000 --- a/tests/transformation/streamline/test_streamline_lfc_w1a2.py +++ /dev/null @@ -1,47 +0,0 @@ -import os -from pkgutil import get_data - -import brevitas.onnx as bo -import numpy as np -import onnx -import onnx.numpy_helper as nph -import torch -from models.LFC import LFC - -import finn.core.onnx_exec as oxe -from finn.core.modelwrapper import ModelWrapper -from finn.transformation.fold_constants import FoldConstants -from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames -from finn.transformation.infer_shapes import InferShapes -from finn.transformation.streamline import Streamline - -export_onnx_path = "test_output_lfc.onnx" -# TODO get from config instead, hardcoded to Docker path for now -trained_lfc_w1a2_checkpoint = ( - "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W2A/checkpoints/best.tar" -) - - -def test_streamline_lfc_w1a2(): - lfc = LFC(weight_bit_width=1, act_bit_width=2, in_bit_width=2).eval() - checkpoint = torch.load(trained_lfc_w1a2_checkpoint, map_location="cpu") - 
lfc.load_state_dict(checkpoint["state_dict"]) - bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) - model = ModelWrapper(export_onnx_path) - model = model.transform(InferShapes()) - model = model.transform(FoldConstants()) - model = model.transform(GiveUniqueNodeNames()) - model = model.transform(GiveReadableTensorNames()) - # load one of the test vectors - raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb") - input_tensor = onnx.load_tensor_from_string(raw_i) - # run using FINN-based execution - input_dict = {"global_in": nph.to_array(input_tensor)} - expected_ctx = oxe.execute_onnx(model, input_dict, True) - expected = expected_ctx[model.graph.output[0].name] - model = model.transform(Streamline()) - produced_ctx = oxe.execute_onnx(model, input_dict, True) - produced = produced_ctx[model.graph.output[0].name] - assert np.isclose(expected, produced, atol=1e-3).all() - model.save("lfc-w1a2-streamlined.onnx") - os.remove(export_onnx_path) diff --git a/tests/transformation/test_batchnorm_to_affine.py b/tests/transformation/test_batchnorm_to_affine.py index dec01b53a9081c6c04143e4465d603d66fc771a6..8728707589ade72fb1b21ca0333c4d0757ac7df0 100644 --- a/tests/transformation/test_batchnorm_to_affine.py +++ b/tests/transformation/test_batchnorm_to_affine.py @@ -1,30 +1,50 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import os from pkgutil import get_data import brevitas.onnx as bo import onnx import onnx.numpy_helper as nph -import torch -from models.LFC import LFC import finn.core.onnx_exec as oxe from finn.core.modelwrapper import ModelWrapper from finn.transformation.batchnorm_to_affine import BatchNormToAffine from finn.transformation.fold_constants import FoldConstants from finn.transformation.infer_shapes import InferShapes +from finn.util.test import get_test_model_trained -export_onnx_path = "test_output_lfc.onnx" -transformed_onnx_path = "test_output_lfc_transformed.onnx" -# TODO get from config instead, hardcoded to Docker path for now -trained_lfc_checkpoint = ( - "/workspace/brevitas_cnv_lfc/pretrained_models/LFC_1W1A/checkpoints/best.tar" -) +export_onnx_path = "test_output_bn2affine.onnx" -def test_batchnorm_to_affine(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) - checkpoint = torch.load(trained_lfc_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) +def 
test_batchnorm_to_affine_lfc_w1a1(): + lfc = get_test_model_trained("LFC", 1, 1) bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) model = ModelWrapper(export_onnx_path) model = model.transform(InferShapes()) @@ -36,3 +56,20 @@ def test_batchnorm_to_affine(): input_dict = {"0": nph.to_array(input_tensor)} assert oxe.compare_execution(model, new_model, input_dict) os.remove(export_onnx_path) + + +# cnv batchnorm to affine not yet supported + +# def test_batchnorm_to_affine_cnv_w1a1(): +# lfc = get_test_model_trained("CNV", 1, 1) +# bo.export_finn_onnx(lfc, (1, 3, 32, 32), export_onnx_path) +# model = ModelWrapper(export_onnx_path) +# model = model.transform(InferShapes()) +# model = model.transform(FoldConstants()) +# # TODO shape inference failing on transformed model below -- needs debug +# new_model = model.transform(BatchNormToAffine()) +# # check that there are no BN nodes left +# # TODO replace this with execution test +# op_types = list(map(lambda x: x.op_type, new_model.graph.node)) +# assert "BatchNormalization" not in op_types +# os.remove(export_onnx_path) diff --git a/tests/transformation/test_collapse_repeated_op.py b/tests/transformation/test_collapse_repeated_op.py index d97cdbc3033000b94e499988155b3af829040109..01d932ece0be4b0beb7ad6094284ec3efb1e525e 100644 --- a/tests/transformation/test_collapse_repeated_op.py +++ b/tests/transformation/test_collapse_repeated_op.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np import onnx.helper as oh from onnx import TensorProto diff --git a/tests/transformation/test_factor_out_mul_sign_magnitude.py b/tests/transformation/test_factor_out_mul_sign_magnitude.py index 3786492fefff046b098bce06cb2e624723b098ec..fca073f5a05e10bd721a18538dada05b4ad0d774 100644 --- a/tests/transformation/test_factor_out_mul_sign_magnitude.py +++ b/tests/transformation/test_factor_out_mul_sign_magnitude.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np import onnx.helper as oh from onnx import TensorProto diff --git a/tests/transformation/test_fold_constants.py b/tests/transformation/test_fold_constants.py index 09dbd95c27cec65183ec7dd6067ce187595fcf52..cd1c346593e3666ce8a89bd4248fa8436423de6d 100644 --- a/tests/transformation/test_fold_constants.py +++ b/tests/transformation/test_fold_constants.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import os from pkgutil import get_data @@ -5,12 +33,12 @@ import brevitas.onnx as bo import numpy as np import onnx import onnx.numpy_helper as np_helper -from models.LFC import LFC import finn.core.onnx_exec as oxe from finn.core.modelwrapper import ModelWrapper from finn.transformation.fold_constants import FoldConstants from finn.transformation.infer_shapes import InferShapes +from finn.util.test import get_test_model_untrained export_onnx_path = "test_output_lfc.onnx" @@ -32,7 +60,7 @@ def test_const_folding(): def test_const_folding_shapes(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) + lfc = get_test_model_untrained("LFC", 1, 1) bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) model = ModelWrapper(export_onnx_path) model = model.transform(InferShapes()) diff --git a/tests/transformation/test_general_transformation.py b/tests/transformation/test_general_transformation.py index dd44502add93d269b8d48ed951620b6a36f9fb1b..33b6041a170f3c0de8f741ef3ecb28682f6429ea 100644 --- 
a/tests/transformation/test_general_transformation.py +++ b/tests/transformation/test_general_transformation.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ from pkgutil import get_data from finn.core.modelwrapper import ModelWrapper diff --git a/tests/transformation/test_infer_datatypes.py b/tests/transformation/test_infer_datatypes.py index e4269499c55eb89d9a9c268f79456ca6ac588028..ae8a52882a9126470dad6ca15d8c35000a8edaff 100644 --- a/tests/transformation/test_infer_datatypes.py +++ b/tests/transformation/test_infer_datatypes.py @@ -1,8 +1,34 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ import os import brevitas.onnx as bo -import torch -from models.LFC import LFC from finn.core.datatype import DataType from finn.core.modelwrapper import ModelWrapper @@ -10,6 +36,7 @@ from finn.transformation.fold_constants import FoldConstants from finn.transformation.general import GiveReadableTensorNames, GiveUniqueNodeNames from finn.transformation.infer_datatypes import InferDataTypes from finn.transformation.infer_shapes import InferShapes +from finn.util.test import get_test_model_trained export_onnx_path = "test_output_lfc.onnx" # TODO get from config instead, hardcoded to Docker path for now @@ -19,9 +46,7 @@ trained_lfc_checkpoint = ( def test_infer_datatypes(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) - checkpoint = torch.load(trained_lfc_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) + lfc = get_test_model_trained("LFC", 1, 1) bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) model = ModelWrapper(export_onnx_path) model = model.transform(InferShapes()) diff --git a/tests/transformation/test_infer_shapes.py b/tests/transformation/test_infer_shapes.py index 20841b32275968ed842fdbbebffa7168b61b7e06..a6ebe540bb5e081178704ec0493d511277562acb 100644 --- a/tests/transformation/test_infer_shapes.py +++ b/tests/transformation/test_infer_shapes.py @@ -1,9 +1,37 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from pkgutil import get_data import numpy as np from onnx import TensorProto, helper -import finn.core.utils as util +import finn.util.basic as util from finn.core.modelwrapper import ModelWrapper from finn.transformation.infer_shapes import InferShapes diff --git a/tests/transformation/test_move_add_past_mul.py b/tests/transformation/test_move_add_past_mul.py index 565bbce39b83d94352d2cd69d01d8270675bbe1f..a0516d6fb2ff985fc112185ce99ad8facd841caf 100644 --- a/tests/transformation/test_move_add_past_mul.py +++ b/tests/transformation/test_move_add_past_mul.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np import onnx.helper as oh from onnx import TensorProto diff --git a/tests/transformation/test_move_scalar_past_matmul.py b/tests/transformation/test_move_scalar_past_matmul.py index c2771ce94d002ab62d33226771965140f8614ec1..896527e82d8cfa869cb979d1102904c70703a14c 100644 --- a/tests/transformation/test_move_scalar_past_matmul.py +++ b/tests/transformation/test_move_scalar_past_matmul.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np import onnx.helper as oh from onnx import TensorProto @@ -7,7 +35,7 @@ from finn.core.modelwrapper import ModelWrapper from finn.transformation.infer_shapes import InferShapes from finn.transformation.streamline import ( MoveScalarAddPastMatMul, - MoveScalarMulPastMatMul + MoveScalarMulPastMatMul, ) diff --git a/tests/transformation/test_renaming.py b/tests/transformation/test_renaming.py index aec1c8a10768f293ff9aaf44d7418c777837010c..db8b8410e27f881b15abc426537b153348f38206 100644 --- a/tests/transformation/test_renaming.py +++ b/tests/transformation/test_renaming.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + from pkgutil import get_data import numpy as np diff --git a/tests/transformation/test_round_thresholds.py b/tests/transformation/test_round_thresholds.py index b6c2b926f32446720b3bdcf561f2994b6667ce9b..d59aba996201f8c2fc67cf6e40497b5d43611d39 100644 --- a/tests/transformation/test_round_thresholds.py +++ b/tests/transformation/test_round_thresholds.py @@ -1,3 +1,31 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + import numpy as np from onnx import TensorProto, helper diff --git a/tests/transformation/test_sign_to_thres.py b/tests/transformation/test_sign_to_thres.py index 75327df3e5194f48f71c914bc62fc5a08588faff..1033a313560c714b02e256e5940694868fa41cbf 100644 --- a/tests/transformation/test_sign_to_thres.py +++ b/tests/transformation/test_sign_to_thres.py @@ -1,17 +1,44 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ import os from pkgutil import get_data import brevitas.onnx as bo import onnx import onnx.numpy_helper as nph -import torch -from models.LFC import LFC import finn.core.onnx_exec as oxe from finn.core.modelwrapper import ModelWrapper from finn.transformation.fold_constants import FoldConstants from finn.transformation.infer_shapes import InferShapes from finn.transformation.streamline import ConvertSignToThres +from finn.util.test import get_test_model_trained export_onnx_path = "test_output_lfc.onnx" transformed_onnx_path = "test_output_lfc_transformed.onnx" @@ -22,9 +49,7 @@ trained_lfc_checkpoint = ( def test_sign_to_thres(): - lfc = LFC(weight_bit_width=1, act_bit_width=1, in_bit_width=1) - checkpoint = torch.load(trained_lfc_checkpoint, map_location="cpu") - lfc.load_state_dict(checkpoint["state_dict"]) + lfc = get_test_model_trained("LFC", 1, 1) bo.export_finn_onnx(lfc, (1, 1, 28, 28), export_onnx_path) model = ModelWrapper(export_onnx_path) model = model.transform(InferShapes()) diff --git a/tests/travis_install.sh b/tests/travis_install.sh index 56953322aa9e6f2e976338d92263966362da479e..05ff95a6a2205d90a5813b41504e2105e36e2f92 100644 --- a/tests/travis_install.sh +++ b/tests/travis_install.sh @@ -1,4 +1,32 @@ #!/bin/bash +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # This script is meant to be called by the "install" step defined in # .travis.yml. See http://docs.travis-ci.com/ for more details. # The behavior of the script is controlled by environment variabled defined diff --git a/tests/util/test_data_packing.py b/tests/util/test_data_packing.py new file mode 100644 index 0000000000000000000000000000000000000000..495ec60966ef67f3bf7b99c63cc70e133859d087 --- /dev/null +++ b/tests/util/test_data_packing.py @@ -0,0 +1,232 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os +import shutil +import subprocess + +import pytest + +import numpy as np + +import finn.util.basic as cutil +from finn.core.datatype import DataType +from finn.util.data_packing import ( + array2hexstring, + finnpy_to_packed_bytearray, + numpy_to_hls_code, + pack_innermost_dim_as_hex_string, + packed_bytearray_to_finnpy, +) + + +@pytest.mark.parametrize("dtype", [DataType.BINARY, DataType.INT2, DataType.INT32]) +@pytest.mark.parametrize("test_shape", [(1, 2, 4), (1, 1, 64), (2, 64)]) +def test_npy2apintstream(test_shape, dtype): + ndarray = cutil.gen_finn_dt_tensor(dtype, test_shape) + test_dir = cutil.make_build_dir(prefix="test_npy2apintstream_") + shape = ndarray.shape + elem_bits = dtype.bitwidth() + packed_bits = shape[-1] * elem_bits + packed_hls_type = "ap_uint<%d>" % packed_bits + elem_hls_type = dtype.get_hls_datatype_str() + npy_in = test_dir + "/in.npy" + npy_out = test_dir + "/out.npy" + # restrict the np datatypes we can handle + npyt_to_ct = { + "float32": "float", + "float64": 
"double", + "int8": "int8_t", + "int32": "int32_t", + "int64": "int64_t", + "uint8": "uint8_t", + "uint32": "uint32_t", + "uint64": "uint64_t", + } + npy_type = npyt_to_ct[str(ndarray.dtype)] + shape_cpp_str = str(shape).replace("(", "{").replace(")", "}") + test_app_string = [] + test_app_string += ["#include <cstddef>"] + test_app_string += ["#define AP_INT_MAX_W 4096"] + test_app_string += ['#include "ap_int.h"'] + test_app_string += ['#include "stdint.h"'] + test_app_string += ['#include "hls_stream.h"'] + test_app_string += ['#include "cnpy.h"'] + test_app_string += ['#include "npy2apintstream.hpp"'] + test_app_string += ["int main(int argc, char *argv[]) {"] + test_app_string += ["hls::stream<%s> teststream;" % packed_hls_type] + test_app_string += [ + 'npy2apintstream<%s, %s, %d, %s>("%s", teststream);' + % (packed_hls_type, elem_hls_type, elem_bits, npy_type, npy_in) + ] + test_app_string += [ + 'apintstream2npy<%s, %s, %d, %s>(teststream, %s, "%s");' + % (packed_hls_type, elem_hls_type, elem_bits, npy_type, shape_cpp_str, npy_out) + ] + test_app_string += ["return 0;"] + test_app_string += ["}"] + with open(test_dir + "/test.cpp", "w") as f: + f.write("\n".join(test_app_string)) + cmd_compile = """ +g++ -o test_npy2apintstream test.cpp /workspace/cnpy/cnpy.cpp \ +-I/workspace/cnpy/ -I{}/include -I/workspace/finn/src/finn/data/cpp \ +--std=c++11 -lz""".format( + os.environ["VIVADO_PATH"] + ) + with open(test_dir + "/compile.sh", "w") as f: + f.write(cmd_compile) + compile = subprocess.Popen( + ["sh", "compile.sh"], stdout=subprocess.PIPE, cwd=test_dir + ) + (stdout, stderr) = compile.communicate() + np.save(npy_in, ndarray) + execute = subprocess.Popen( + "./test_npy2apintstream", stdout=subprocess.PIPE, cwd=test_dir + ) + (stdout, stderr) = execute.communicate() + produced = np.load(npy_out) + success = (produced == ndarray).all() + # only delete generated code if test has passed + # useful for debug otherwise + if success: + shutil.rmtree(test_dir) + 
assert success + + +def test_array2hexstring(): + assert array2hexstring([1, 1, 1, 0], DataType.BINARY, 4) == "0xe" + assert array2hexstring([1, 1, 1, 0], DataType.BINARY, 8) == "0x0e" + assert array2hexstring([1, 1, 1, -1], DataType.BIPOLAR, 8) == "0x0e" + assert array2hexstring([3, 3, 3, 3], DataType.UINT2, 8) == "0xff" + assert array2hexstring([1, 3, 3, 1], DataType.UINT2, 8) == "0x7d" + assert array2hexstring([1, -1, 1, -1], DataType.INT2, 8) == "0x77" + assert array2hexstring([1, 1, 1, -1], DataType.INT4, 16) == "0x111f" + assert array2hexstring([-1], DataType.FLOAT32, 32) == "0xbf800000" + assert array2hexstring([17.125], DataType.FLOAT32, 32) == "0x41890000" + assert array2hexstring([1, 1, 0, 1], DataType.BINARY, 4, reverse=True) == "0xb" + assert array2hexstring([1, 1, 1, 0], DataType.BINARY, 8, reverse=True) == "0x07" + + +def test_pack_innermost_dim_as_hex_string(): + A = [[1, 1, 1, 0], [0, 1, 1, 0]] + eA = np.asarray(["0x0e", "0x06"]) + assert (pack_innermost_dim_as_hex_string(A, DataType.BINARY, 8) == eA).all() + B = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] + eB = np.asarray([["0x0f", "0x0f"], ["0x07", "0x0d"]]) + assert (pack_innermost_dim_as_hex_string(B, DataType.UINT2, 8) == eB).all() + C = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] + eC = np.asarray([["0x0f", "0x0f"], ["0x0d", "0x07"]]) + assert ( + pack_innermost_dim_as_hex_string(C, DataType.UINT2, 8, reverse_inner=True) == eC + ).all() + + +def test_numpy_to_hls_code(): + def remove_all_whitespace(s): + return "".join(s.split()) + + A = [[1, 1, 1, 0], [0, 1, 1, 0]] + ret = numpy_to_hls_code(A, DataType.BINARY, "test", True) + eA = """ap_uint<4> test[2] = + {ap_uint<4>("0xe", 16), ap_uint<4>("0x6", 16)};""" + assert remove_all_whitespace(ret) == remove_all_whitespace(eA) + B = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] + ret = numpy_to_hls_code(B, DataType.UINT2, "test", True) + eB = """ap_uint<4> test[2][2] = + {{ap_uint<4>("0xf", 16), ap_uint<4>("0xf", 16)}, + {ap_uint<4>("0x7", 16), ap_uint<4>("0xd", 
16)}};""" + assert remove_all_whitespace(ret) == remove_all_whitespace(eB) + ret = numpy_to_hls_code(B, DataType.UINT2, "test", True, True) + eB = """{{ap_uint<4>("0xf", 16), ap_uint<4>("0xf", 16)}, + {ap_uint<4>("0x7", 16), ap_uint<4>("0xd", 16)}};""" + assert remove_all_whitespace(ret) == remove_all_whitespace(eB) + + +def test_finnpy_to_packed_bytearray(): + A = [[1, 1, 1, 0], [0, 1, 1, 0]] + eA = np.asarray([[14], [6]], dtype=np.uint8) + assert (finnpy_to_packed_bytearray(A, DataType.BINARY) == eA).all() + B = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] + eB = np.asarray([[[15], [15]], [[7], [13]]], dtype=np.uint8) + assert (finnpy_to_packed_bytearray(B, DataType.UINT2) == eB).all() + C = [1, 7, 2, 5] + eC = np.asarray([23, 37], dtype=np.uint8) + assert (finnpy_to_packed_bytearray(C, DataType.UINT4) == eC).all() + D = [[1, 7, 2, 5], [2, 5, 1, 7]] + eD = np.asarray([[23, 37], [37, 23]], dtype=np.uint8) + assert (finnpy_to_packed_bytearray(D, DataType.UINT4) == eD).all() + E = [[-4, 0, -4, -4]] + eE = np.asarray( + [[255, 255, 255, 252, 0, 0, 0, 0, 255, 255, 255, 252, 255, 255, 255, 252]], + dtype=np.uint8, + ) + assert (finnpy_to_packed_bytearray(E, DataType.INT32) == eE).all() + + +def test_packed_bytearray_to_finnpy(): + A = np.asarray([[14], [6]], dtype=np.uint8) + eA = [[1, 1, 1, 0], [0, 1, 1, 0]] + eA = np.asarray(eA, dtype=np.float32) + shapeA = eA.shape + assert (packed_bytearray_to_finnpy(A, DataType.BINARY, shapeA) == eA).all() + B = np.asarray([[[15], [15]], [[7], [13]]], dtype=np.uint8) + eB = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] + eB = np.asarray(eB, dtype=np.float32) + shapeB = eB.shape + assert (packed_bytearray_to_finnpy(B, DataType.UINT2, shapeB) == eB).all() + C = np.asarray([23, 37], dtype=np.uint8) + eC = [1, 7, 2, 5] + eC = np.asarray(eC, dtype=np.float32) + shapeC = eC.shape + assert (packed_bytearray_to_finnpy(C, DataType.UINT4, shapeC) == eC).all() + D = np.asarray([[23, 37], [37, 23]], dtype=np.uint8) + eD = [[1, 7, 2, 5], [2, 5, 1, 7]] + eD = 
np.asarray(eD, dtype=np.float32) + shapeD = eD.shape + assert (packed_bytearray_to_finnpy(D, DataType.UINT4, shapeD) == eD).all() + E = np.asarray( + [[255, 255, 255, 252, 0, 0, 0, 0, 255, 255, 255, 252, 255, 255, 255, 252]], + dtype=np.uint8, + ) + eE = [[-4, 0, -4, -4]] + eE = np.asarray(eE, dtype=np.float32) + shapeE = eE.shape + assert (packed_bytearray_to_finnpy(E, DataType.INT32, shapeE) == eE).all() + F = np.asarray( + [[252, 255, 255, 255, 0, 0, 0, 0, 252, 255, 255, 255, 252, 255, 255, 255]], + dtype=np.uint8, + ) + eF = [[-4, 0, -4, -4]] + eF = np.asarray(eF, dtype=np.float32) + shapeF = eF.shape + assert ( + packed_bytearray_to_finnpy( + F, DataType.INT32, shapeF, reverse_inner=True, reverse_endian=True + ) + == eF + ).all() diff --git a/tests/test_gen_FINN_dt_tensor.py b/tests/util/test_gen_finn_dt_tensor.py similarity index 60% rename from tests/test_gen_FINN_dt_tensor.py rename to tests/util/test_gen_finn_dt_tensor.py index 444b60ed42b5d13927f93ab95eeabb9727ce907f..f9944e7f5283725d4c7c3b70ead899c9e1d4ea49 100644 --- a/tests/test_gen_FINN_dt_tensor.py +++ b/tests/util/test_gen_finn_dt_tensor.py @@ -1,6 +1,32 @@ -import numpy as np +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -import finn.core.utils as util +import finn.util.basic as util from finn.core.datatype import DataType diff --git a/tests/util/test_padding.py b/tests/util/test_padding.py new file mode 100644 index 0000000000000000000000000000000000000000..4e49acf12badc28bd231e990a5d02dc25d3a2006 --- /dev/null +++ b/tests/util/test_padding.py @@ -0,0 +1,57 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import numpy as np + +from finn.util.basic import pad_tensor_to_multiple_of + + +def test_pad_tensor_to_multiple_of(): + A = np.eye(3) + B = pad_tensor_to_multiple_of(A, [2, 2], val=-1) + assert B.shape == (4, 4) + assert (B[:3, :3] == A).all() + assert (B[3, :] == -1).all() + assert (B[:, 3] == -1).all() + B = pad_tensor_to_multiple_of(A, [5, 5], val=-1, distr_pad=True) + assert B.shape == (5, 5) + assert (B[1:4, 1:4] == A).all() + assert (B[0, :] == -1).all() + assert (B[:, 0] == -1).all() + assert (B[4, :] == -1).all() + assert (B[:, 4] == -1).all() + # using -1 in pad_to parameter should give an unpadded dimension + B = pad_tensor_to_multiple_of(A, [-1, 5], val=-1, distr_pad=True) + assert B.shape == (3, 5) + assert (B[:, 1:4] == A).all() + assert (B[:, 0] == -1).all() + assert (B[:, 4] == -1).all() + # if odd number of padding pixels required, 1 more should go after existing + B = pad_tensor_to_multiple_of(A, [6, 6], val=-1, distr_pad=True) + assert B.shape == (6, 6) + assert (B[1:4, 1:4] == A).all() diff --git a/tests/util/test_rtlsim2npy.py b/tests/util/test_rtlsim2npy.py new file mode 100644 index 0000000000000000000000000000000000000000..87ea5c2c57b10360ba64ac0593d4a7f5badef735 
--- /dev/null +++ b/tests/util/test_rtlsim2npy.py @@ -0,0 +1,107 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import numpy as np + +from finn.core.datatype import DataType +from finn.util.data_packing import unpack_innermost_dim_from_hex_string + + +def test_unpack_innermost_dim_from_hex_string(): + # BINARY + A = np.asarray(["0x0e", "0x06"]) + dtype = DataType.BINARY + shape = (1, 2, 4) + eA = [[1, 1, 1, 0], [0, 1, 1, 0]] + A_unpacked = unpack_innermost_dim_from_hex_string(A, dtype, shape, 8) + assert (A_unpacked == eA).all() + + A = np.asarray(["0x0e", "0x06"]) + eA_flipped = [[0, 1, 1, 1], [0, 1, 1, 0]] + A_unpacked_flipped = unpack_innermost_dim_from_hex_string( + A, dtype, shape, 8, reverse_inner=True + ) + assert (A_unpacked_flipped == eA_flipped).all() + + # UINT2 + B = np.asarray([["0x0f", "0x0f"], ["0x07", "0x0d"]]) + dtype = DataType.UINT2 + shape = (1, 2, 2, 2) + eB = [[[3, 3], [3, 3]], [[1, 3], [3, 1]]] + B_unpacked = unpack_innermost_dim_from_hex_string(B, dtype, shape, 8) + assert (B_unpacked == eB).all() + + B = np.asarray([["0x0f", "0x0f"], ["0x07", "0x0d"]]) + eB_flipped = [[[3, 3], [3, 3]], [[3, 1], [1, 3]]] + B_unpacked_flipped = unpack_innermost_dim_from_hex_string( + B, dtype, shape, 8, reverse_inner=True + ) + assert (B_unpacked_flipped == eB_flipped).all() + + # INT2 + C = np.asarray([["0x0f", "0x0f"], ["0x07", "0x0d"]]) + dtype = DataType.INT2 + shape = (1, 2, 2, 2) + eC = [[[-1, -1], [-1, -1]], [[1, -1], [-1, 1]]] + C_unpacked = unpack_innermost_dim_from_hex_string(C, dtype, shape, 8) + assert (C_unpacked == eC).all() + + C = np.asarray([["0x0f", "0x0f"], ["0x07", "0x0d"]]) + dtype = DataType.INT2 + shape = (1, 2, 2, 2) + eC = [[[-1, -1], [-1, -1]], [[-1, 1], [1, -1]]] + C_unpacked = unpack_innermost_dim_from_hex_string( + C, dtype, shape, 8, reverse_inner=True + ) + assert (C_unpacked == eC).all() + + # INT4 + D = np.asarray(["0x0e", "0x06"]) + dtype = DataType.INT4 + shape = (2, 1) + eD = [[-2], [6]] + D_unpacked = unpack_innermost_dim_from_hex_string(D, dtype, shape, 8) + assert (D_unpacked == eD).all() + + D_unpacked = 
unpack_innermost_dim_from_hex_string( + D, dtype, shape, 8, reverse_inner=True + ) + assert (D_unpacked == eD).all() + + # INT32 + E = np.asarray(["0xffffffff", "0xfffffffe", "0x02", "0xffffffef"]) + dtype = DataType.INT32 + shape = (1, 4, 1) + eE = [[[-1], [-2], [2], [-17]]] + E_unpacked = unpack_innermost_dim_from_hex_string(E, dtype, shape, 32) + assert (E_unpacked == eE).all() + + E_unpacked = unpack_innermost_dim_from_hex_string( + E, dtype, shape, 32, reverse_inner=True + ) + assert (E_unpacked == eE).all() diff --git a/tests/util/test_shape_utils.py b/tests/util/test_shape_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ab58f591f39234b659de7c05f2592c76ebb73a3e --- /dev/null +++ b/tests/util/test_shape_utils.py @@ -0,0 +1,41 @@ +# Copyright (c) 2020, Xilinx +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of FINN nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import numpy as np + +import finn.util.basic as util + + +def test_interleave_matrix_outer_dim_from_partitions(): + A = np.eye(10) + n_parts = 2 + Ax = util.interleave_matrix_outer_dim_from_partitions(A, n_parts) + part_size = 10 // n_parts + assert Ax.shape == (n_parts, part_size, 10) + for r_ind in range(A.shape[0]): + assert (A[r_ind] == Ax[r_ind % n_parts][r_ind // n_parts]).all()