Commit 792aa67b authored by sfritschi

Updated thesis

parent 053bb006
......@@ -30,7 +30,7 @@ def plot_flux(inpores, outpores, p, Q, solver_name):
plt.ylabel(r"Flux [$m^3s^{-1}$]")
plt.plot(x, Qp, label=r"out-going")
plt.plot(x, Qm, label=r"in-coming")
plt.plot(x, Qs, label=r"sum")
plt.plot(x, Qs, label=r"sum", linewidth=2)
plt.legend()
plt.savefig("../thesis/plots/flux_{}.png".format(solver_name))
plt.close()
......
thesis/plots/flux_AMG.png (image replaced: 37.9 KB → 36.5 KB)
thesis/plots/flux_CG.png (image replaced: 37.7 KB → 36.3 KB)
thesis/plots/flux_ILU.png (image replaced: 37.3 KB → 35.9 KB)
thesis/plots/flux_PETSC.png (image replaced: 38 KB → 36.6 KB)
......@@ -32,10 +32,13 @@
\abx@aux@cite{hypre-web-page}
\abx@aux@segm{0}{0}{hypre-web-page}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}Solver}{3}{subsection.2.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.3}Results}{3}{subsection.2.3}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Pressures $p_{\mathrm {in}}$ and $p_{\mathrm {out}}$ are applied to in-pores and out-pores respectively, driving the network flow. The resulting pressure system is solved with the respective solvers from above and the mean total pore flux is shown in each case. With PETSc using 4 processes to solve the system. (AMG = algebraic multi-grid, CG = conjugate gradients, ILU = incomplete LU-preconditioning + GMRES)}}{4}{figure.1}\protected@file@percent }
\newlabel{fig:balance}{{1}{4}{Pressures $p_{\mathrm {in}}$ and $p_{\mathrm {out}}$ are applied to in-pores and out-pores respectively, driving the network flow. The resulting pressure system is solved with the respective solvers from above and the mean total pore flux is shown in each case. With PETSc using 4 processes to solve the system. (AMG = algebraic multi-grid, CG = conjugate gradients, ILU = incomplete LU-preconditioning + GMRES)}{figure.1}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.3}Limitations}{3}{subsection.2.3}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.4}Results}{3}{subsection.2.4}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Pressures $p_{\mathrm {in}}$ and $p_{\mathrm {out}}$ are applied to in-pores and out-pores respectively, driving the network flow. The resulting pressure system is solved with the respective solvers from above and the different fluxes are shown for all pores. With PETSc using 4 processes to solve the system. (AMG = algebraic multi-grid, CG = conjugate gradients, ILU = incomplete LU-preconditioning + GMRES)\relax }}{4}{figure.caption.1}\protected@file@percent }
\providecommand*\caption@xref[2]{\@setref\relax\@undefined{#1}}
\newlabel{fig:balance}{{1}{4}{Pressures $p_{\mathrm {in}}$ and $p_{\mathrm {out}}$ are applied to in-pores and out-pores respectively, driving the network flow. The resulting pressure system is solved with the respective solvers from above and the different fluxes are shown for all pores. With PETSc using 4 processes to solve the system. (AMG = algebraic multi-grid, CG = conjugate gradients, ILU = incomplete LU-preconditioning + GMRES)\relax }{figure.caption.1}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {A}PETSc Installation}{4}{appendix.A}\protected@file@percent }
\newlabel{appendix:install}{{A}{4}{PETSc Installation}{appendix.A}{}}
\abx@aux@refcontextdefaultsdone
\abx@aux@defaultrefcontext{0}{MEYER2021103936}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{petsc-web-page}{none/global//global/global}
......
......@@ -2,5 +2,6 @@
\BOOKMARK [1][-]{section.2}{Parallel Flow Solver}{}% 2
\BOOKMARK [2][-]{subsection.2.1}{PETSc Interface}{section.2}% 3
\BOOKMARK [2][-]{subsection.2.2}{Solver}{section.2}% 4
\BOOKMARK [2][-]{subsection.2.3}{Results}{section.2}% 5
\BOOKMARK [1][-]{appendix.A}{PETSc Installation}{}% 6
\BOOKMARK [2][-]{subsection.2.3}{Limitations}{section.2}% 5
\BOOKMARK [2][-]{subsection.2.4}{Results}{section.2}% 6
\BOOKMARK [1][-]{appendix.A}{PETSc Installation}{}% 7
......@@ -6,6 +6,7 @@
% packages
\usepackage[backend=biber, sorting=none]{biblatex}
\usepackage[labelfont=bf]{caption} % figure captions (bold-face)
\usepackage{multicol} % multicolumn environment
\usepackage{tikz} % figures
\usepackage{hyperref} % web-links
......@@ -39,7 +40,7 @@
\newpage
\begin{center} \Large{\textbf{Abstract}} \end{center}
\hspace{0.5cm}This thesis is based on \cite{MEYER2021103936} and involves the parallelization of core functions in the \emph{netflow} Python module introduced in the aforementioned paper, using existing MPI-based (Open MPI) parallel solvers from the PETSc toolkit \cite{petsc-web-page}.
\hspace{0.5cm}To study the flow properties of large void-space geometries found in porous media such as soil or gravel, \cite{MEYER2021103936} describes and implements routines for the generation \& simulation of flow networks representing the complicated pore arrangement. Based on a relatively small base network acquired via tomographic scans, the generated flow network is of intermediate size (millions of pores). To extend this procedure to even larger networks (up to 100 million pores), parallel computing is employed both for the generation of pore networks and for solving the flow on these networks. For the latter, we rely on existing MPI-based parallel solvers from the PETSc toolkit \cite{petsc-web-page}. See Appendix~\ref{appendix:install} for installation details.
\vspace{5ex}
\begin{multicols}{2}
......@@ -51,15 +52,17 @@
\hspace{0.5cm}Despite the availability of the Python package \emph{petsc4py}, we decided to use the native C implementation instead. This choice was motivated by the fact that the C API is better maintained, and the inclusion of an additional Python module would further complicate the dependency tree on the Python side. In order to interface the chosen C API of PETSc with the \emph{netflow} Python module, we rely on Cython to wrap the C source for use from Python; the wrapper is subsequently compiled and linked with all required PETSc compiler and linker flags. This allows us to invoke a \verb|solve_py()| function from Python that delegates the relevant parameters, namely the system matrix and the right-hand-side vector, to the C function \verb|solve()|. There, the system matrix, given in compressed row storage (CSR) format, is converted into PETSc's internal representation for sparse, distributed matrices, \verb|Mat|. The same applies to the r.h.s., which is used to initialize a distributed PETSc vector object, \verb|Vec|. Once the solution has been computed, it is communicated in full to the root rank via a call to \verb|MPI_Gatherv()|, stored in a Cython memoryview, converted back into a NumPy array, and finally returned by \verb|solve_py()|.
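\hspace{0.5cm}As a minimal sketch of this conversion and gather step (the signature, variable names and omitted preallocation/error handling are assumptions for illustration, not the exact thesis code; PETSc is assumed to be initialized already), the C side could look roughly as follows:
\begin{verbatim}
/* Sketch only: simplified solve() signature, no preallocation or
   error checking (fine for illustration, slow for large systems). */
#include <stdlib.h>
#include <petscksp.h>

int solve(PetscInt n, const PetscInt *rowptr, const PetscInt *colidx,
          const PetscScalar *vals, const PetscScalar *rhs,
          PetscScalar *sol /* full solution, used on root rank only */)
{
    Mat A; Vec b, x;
    PetscMPIInt rank, size;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
    MPI_Comm_size(PETSC_COMM_WORLD, &size);

    MatCreate(PETSC_COMM_WORLD, &A);
    MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n);
    MatSetFromOptions(A);
    MatSetUp(A);
    VecCreate(PETSC_COMM_WORLD, &b);
    VecSetSizes(b, PETSC_DECIDE, n);
    VecSetFromOptions(b);

    if (rank == 0) {  /* only the root rank holds the CSR data */
        for (PetscInt i = 0; i < n; ++i) {
            PetscInt ncols = rowptr[i + 1] - rowptr[i];
            MatSetValues(A, 1, &i, ncols, &colidx[rowptr[i]],
                         &vals[rowptr[i]], INSERT_VALUES);
            VecSetValue(b, i, rhs[i], INSERT_VALUES);
        }
    }
    /* Collective assembly ships the stashed entries to their owners. */
    MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
    VecAssemblyBegin(b); VecAssemblyEnd(b);

    VecDuplicate(b, &x);
    /* ... KSP solve of A x = b, see the Solver subsection ... */

    /* Gather the distributed solution back to the root rank. */
    PetscInt nloc; const PetscScalar *xloc;
    VecGetLocalSize(x, &nloc);
    VecGetArrayRead(x, &xloc);
    int cnt = (int)nloc, *counts = NULL, *displs = NULL;
    if (rank == 0) {
        counts = (int *)malloc((size_t)size * sizeof(int));
        displs = (int *)malloc((size_t)size * sizeof(int));
    }
    MPI_Gather(&cnt, 1, MPI_INT, counts, 1, MPI_INT, 0, PETSC_COMM_WORLD);
    if (rank == 0) {
        int off = 0;
        for (int r = 0; r < size; ++r) { displs[r] = off; off += counts[r]; }
    }
    MPI_Gatherv(xloc, cnt, MPIU_SCALAR, sol, counts, displs, MPIU_SCALAR,
                0, PETSC_COMM_WORLD);
    VecRestoreArrayRead(x, &xloc);

    free(counts); free(displs);
    MatDestroy(&A); VecDestroy(&b); VecDestroy(&x);
    return 0;
}
\end{verbatim}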
\subsection{Solver}
\hspace{0.5cm}The actual solver, written in C, then utilizes PETSc's collection of Krylov subspace (KSP) methods to iteratively approximate the solution of the system in parallel with the available MPI processes. To avoid data duplication, the initial assembly of the PETSc objects is done only on the root rank and then communicated in parts to the corresponding ranks through PETSc's collective assembly routines. The iterative method chosen to solve the non-symmetric pressure system arising from the flow network is GMRES, combined with a left algebraic multi-grid preconditioner supplied via hypre \cite{hypre-web-page}.
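\hspace{0.5cm}A corresponding KSP setup could be sketched as follows (illustrative only; the tolerances shown are placeholders and need not match the values used in the thesis code, and PETSc must have been configured with hypre support):
\begin{verbatim}
/* Sketch: GMRES with a left BoomerAMG (hypre) preconditioner. */
KSP ksp; PC pc;
KSPCreate(PETSC_COMM_WORLD, &ksp);
KSPSetOperators(ksp, A, A);
KSPSetType(ksp, KSPGMRES);
KSPSetPCSide(ksp, PC_LEFT);          /* left preconditioning */
KSPGetPC(ksp, &pc);
PCSetType(pc, PCHYPRE);
PCHYPRESetType(pc, "boomeramg");     /* algebraic multi-grid */
/* Placeholder tolerances, not the thesis settings. */
KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetFromOptions(ksp);              /* allow run-time overrides */
KSPSolve(ksp, b, x);
KSPDestroy(&ksp);
\end{verbatim}
Calling \verb|KSPSetFromOptions()| additionally allows switching the method or preconditioner at run time via command-line options such as \verb|-ksp_type| and \verb|-pc_type|.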
\subsection{Limitations}
\hspace{0.5cm}Since the data structure used to represent the pores in \emph{netflow} is a Python \verb|set|, the order of the pores is arbitrary. In particular, each MPI process may see a different ordering, which necessitates initializing the full system matrix \& r.h.s. on a designated root rank, such that the ordering is consistent across all ranks. This requires additional communication, but prevents duplicating the data associated with the matrix and r.h.s. on the remaining processes.
\subsection{Results}
\hspace{0.5cm}In order to assess the quality of the pressure-solution obtained by this solver, we study the fluxes induced by the pore pressures for a given base network comprised of 2636 pores. In particular, we look at the sum of all in- and out-going fluxes per pore, obtained from the function \verb|flux_balance()|, and aggregated over all pores of the network. As expected from the conservation of mass, the mean is close to $0$ ($\approx 10^{-10}$) and the maximum is $\approx 10^{-7}$, which is in complete agreement with existing single-core solvers implemented in \emph{netflow}, see Figure ~\ref{fig:balance}.
\hspace{0.5cm}In order to assess the quality of the pressure solution obtained by this solver, we study the fluxes induced by the pore pressures for a given base network comprised of 2636 pores. In particular, we look at all in-coming and out-going fluxes as well as their sum on a per-pore basis (except for source/sink pores), obtained from the function \verb|flux_balance()|. Repeating this analysis for all available solvers and comparing the results of the parallel PETSc solver with the existing single-core solvers shows that the parallel version is in complete agreement with the rest in terms of solution quality, as depicted in Figure~\ref{fig:balance}.
\end{multicols}
\begin{figure}[h]
\centering
\includegraphics[width=0.8\textwidth]{plots/flux_PETSC.png}
\caption{Pressures $p_{\mathrm{in}}$ and $p_{\mathrm{out}}$ are applied to in-pores and out-pores respectively, driving the network flow. The resulting pressure system is solved with the respective solvers from above and the mean total pore flux is shown in each case. With PETSc using 4 processes to solve the system. (AMG = algebraic multi-grid, CG = conjugate gradients, ILU = incomplete LU-preconditioning + GMRES)}
\caption{Pressures $p_{\mathrm{in}}$ and $p_{\mathrm{out}}$ are applied to in-pores and out-pores respectively, driving the network flow. The resulting pressure system is solved with each of the solvers listed above, and the different fluxes are shown for all pores. PETSc uses 4 processes to solve the system. (AMG = algebraic multi-grid, CG = conjugate gradients, ILU = incomplete LU preconditioning + GMRES)}
\label{fig:balance}
\end{figure}
......@@ -68,6 +71,7 @@
\appendix
\begin{center} \Large{\textbf{Appendix}} \end{center}
\section{PETSc Installation}
\label{appendix:install}
For the purposes of this thesis, PETSc was installed in the following way, given \textbf{existing} (Open) MPI compilers located at \verb|/usr/bin/|.
\begin{itemize}
\item Clone \href{https://gitlab.com/petsc/petsc}{PETSc repository}.
......