Institut für Parallele und Verteilte Systeme (IPVS)

Publikationen

Eine Übersicht der Publikationen des Instituts für Parallele und Verteilte Systeme

Publikationen SGS: Bibliographie 2020 BibTeX

 
@inproceedings {INPROC-2020-30,
   author = {Naseri, Alireza and Totounferoush, Amin and Gonzales, Ignacio and Mehl, Miriam and P{\'e}rez-Segarra, Carlos},
   title = {{A scalable framework for the partitioned solution of fluid--structure interaction problems}},
   booktitle = {Computational Mechanics},
   publisher = {Springer},
   institution = {Universit{\"a}t Stuttgart, Fakult{\"a}t Informatik, Elektrotechnik und Informationstechnik, Germany},
   type = {Konferenz-Beitrag},
   month = {Mai},
   year = {2020},
   doi = {10.1007/s00466-020-01860-y},
   keywords = {Mehl, Miriam; P{\'e}rez-Segarra, Carlos},
   language = {Englisch},
   cr-category = {G.1.8 Partial Differential Equations,     J.2 Physical Sciences and Engineering,     J.3 Life and Medical Sciences},
   ee = {https://link.springer.com/article/10.1007/s00466-020-01860-y},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Parallele und Verteilte Systeme, Simulation gro{\ss}er Systeme},
   abstract = {In this work, we present a scalable and efficient parallel solver for the partitioned solution of fluid--structure interaction problems through multi-code coupling. Two instances of an in-house parallel software, TermoFluids, are used to solve the fluid and the structural sub-problems, coupled together on the interface via the preCICE coupling library. For fluid flow, the Arbitrary Lagrangian--Eulerian form of the Navier--Stokes equations is solved on an unstructured conforming grid using a second-order finite-volume discretization. A parallel dynamic mesh method for unstructured meshes is used to track the moving boundary. For the structural problem, the nonlinear elastodynamics equations are solved on an unstructured grid using a second-order finite-volume method. A semi-implicit FSI coupling method is used which segregates the fluid pressure term and couples it strongly to the structure, while the remaining fluid terms and the geometrical nonlinearities are only loosely coupled. A robust and advanced multi-vector quasi-Newton method is used for the coupling iterations between the solvers. Both the fluid and the structural solver use distributed-memory parallelism. The intra-solver communication required for data update in the solution process is carried out using non-blocking point-to-point communicators. The inter-code communication is fully parallel and point-to-point, avoiding any central communication unit. Inside each single-physics solver, the load is balanced by dividing the computational domain into fairly equal blocks for each process. Additionally, a load balancing model is used at the inter-code level to minimize the overall idle time of the processes. Two practical test cases in the context of hemodynamics are studied, demonstrating the accuracy and computational efficiency of the coupled solver. Strong scalability test results show a parallel efficiency of 83\% on 10,080 CPU cores.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2020-30&engl=0},
   internal-note = {NOTE(review): the old isbn field held a DOI (moved to doi). Same title/DOI as ART-2020-09 and booktitle is a journal name -- presumably this should be an @article or removed as a duplicate; confirm before changing the entry type.}
}
@inproceedings {INPROC-2020-16,
   author = {Hirschmann, Steffen and Kronenburg, Andreas and Glass, Colin W. and Pfl{\"u}ger, Dirk},
   title = {{Load-Balancing for Large-Scale Soot Particle Agglomeration Simulations}},
   booktitle = {Parallel Computing: Technology Trends},
   editor = {Foster, Ian and Joubert, Gerhard R. and Kucera, Ludek and Nagel, Wolfgang E. and Peters, Frans},
   publisher = {IOS Press},
   institution = {Universit{\"a}t Stuttgart, Fakult{\"a}t Informatik, Elektrotechnik und Informationstechnik, Germany},
   series = {Advances in Parallel Computing},
   volume = {36},
   pages = {147--156},
   type = {Konferenz-Beitrag},
   month = {M{\"a}rz},
   year = {2020},
   doi = {10.3233/APC200035},
   language = {Englisch},
   cr-category = {G.0 Mathematics of Computing General},
   ee = {ftp://ftp.informatik.uni-stuttgart.de/pub/library/ncstrl.ustuttgart_fi/INPROC-2020-16/INPROC-2020-16.pdf},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Parallele und Verteilte Systeme, Simulation gro{\ss}er Systeme},
   abstract = {In this work, we combine several previous efforts to simulate a large-scale soot particle agglomeration with a dynamic, multi-scale turbulent background flow field. We build upon previous simulations which include 3.2 million particles and implement load-balancing into the used simulation software as well as tests of the load-balancing mechanisms on this scenario. We increase the simulation to 109.85 million particles, superpose a dynamically changing multi-scale background flow field and use our software enhancements to the molecular dynamics software ESPResSo to simulate this on a Cray XC40 supercomputer. To verify that our setup reproduces essential physics we scale the influence of the flow field down to make the scenario mostly homogeneous on the subdomain scale. Finally, we show that even on the homogeneous version of this soot particle agglomeration simulation, load-balancing still pays off.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2020-16&engl=0}
}
@article {ART-2020-09,
   author = {Naseri, Alireza and Totounferoush, Amin and Gonzales, Ignacio and Mehl, Miriam and P{\'e}rez-Segarra, Carlos David},
   title = {{A scalable framework for the partitioned solution of fluid--structure interaction problems}},
   journal = {Computational Mechanics},
   publisher = {Springer Verlag},
   volume = {66},
   pages = {471--489},
   type = {Artikel in Zeitschrift},
   month = {Mai},
   year = {2020},
   doi = {10.1007/s00466-020-01860-y},
   keywords = {Fluid-Structure Interaction; Partitioned Method; Multi-Code Coupling; Scalability; HPC},
   language = {Englisch},
   cr-category = {J.2 Physical Sciences and Engineering,     J.3 Life and Medical Sciences,     I.6.3 Simulation and Modeling Applications},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Parallele und Verteilte Systeme, Simulation gro{\ss}er Systeme},
   abstract = {In this work, we present a scalable and efficient parallel solver for the partitioned solution of fluid--structure interaction problems through multi-code coupling. Two instances of an in-house parallel software, TermoFluids, are used to solve the fluid and the structural sub-problems, coupled together on the interface via the preCICE coupling library. For fluid flow, the Arbitrary Lagrangian--Eulerian form of the Navier--Stokes equations is solved on an unstructured conforming grid using a second-order finite-volume discretization. A parallel dynamic mesh method for unstructured meshes is used to track the moving boundary. For the structural problem, the nonlinear elastodynamics equations are solved on an unstructured grid using a second-order finite-volume method. A semi-implicit FSI coupling method is used which segregates the fluid pressure term and couples it strongly to the structure, while the remaining fluid terms and the geometrical nonlinearities are only loosely coupled. A robust and advanced multi-vector quasi-Newton method is used for the coupling iterations between the solvers. Both the fluid and the structural solver use distributed-memory parallelism. The intra-solver communication required for data update in the solution process is carried out using non-blocking point-to-point communicators. The inter-code communication is fully parallel and point-to-point, avoiding any central communication unit. Inside each single-physics solver, the load is balanced by dividing the computational domain into fairly equal blocks for each process. Additionally, a load balancing model is used at the inter-code level to minimize the overall idle time of the processes. Two practical test cases in the context of hemodynamics are studied, demonstrating the accuracy and computational efficiency of the coupled solver. Strong scalability test results show a parallel efficiency of 83\% on 10,080 CPU cores.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=ART-2020-09&engl=0}
}
@article {ART-2020-08,
   author = {Subramanian, Shashank and Scheufele, Klaudius and Mehl, Miriam and Biros, George},
   title = {{Where did the tumor start? An inverse solver with sparse localization for tumor growth models}},
   journal = {Inverse Problems},
   publisher = {IOP Publisher},
   volume = {36},
   number = {4},
   type = {Artikel in Zeitschrift},
   month = {Februar},
   year = {2020},
   doi = {10.1088/1361-6420/ab649c},
   language = {Englisch},
   cr-category = {G.1.2 Numerical Analysis Approximation,     G.1.6 Numerical Analysis Optimization,     G.1.8 Partial Differential Equations,     I.4 Image Processing and Computer Vision,     I.6.8 Types of Simulation,     J.3 Life and Medical Sciences},
   ee = {https://iopscience.iop.org/article/10.1088/1361-6420/ab649c,     https://arxiv.org/abs/1907.06564},
   contact = {miriam.mehl@ipvs.uni-stuttgart.de},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Parallele und Verteilte Systeme, Simulation gro{\ss}er Systeme},
   abstract = {We present a numerical scheme for solving an inverse problem for parameter estimation in tumor growth models for glioblastomas, a form of aggressive primary brain tumor. The growth model is a reaction--diffusion partial differential equation (PDE) for the tumor concentration. We use a PDE-constrained optimization formulation for the inverse problem. The unknown parameters are the reaction coefficient (proliferation), the diffusion coefficient (infiltration), and the initial condition field for the tumor PDE. Segmentation of magnetic resonance imaging (MRI) scans drive the inverse problem where segmented tumor regions serve as partial observations of the tumor concentration. Like most cases in clinical practice, we use data from a single time snapshot. Moreover, the precise time relative to the initiation of the tumor is unknown, which poses an additional difficulty for inversion. We perform a frozen-coefficient spectral analysis and show that the inverse problem is severely ill-posed. We introduce a biophysically motivated regularization on the structure and magnitude of the tumor initial condition. In particular, we assume that the tumor starts at a few locations (enforced with a sparsity constraint on the initial condition of the tumor) and that the initial condition magnitude in the maximum norm is equal to one. We solve the resulting optimization problem using an inexact quasi-Newton method combined with a compressive sampling algorithm for the sparsity constraint. Our implementation uses PETSc and AccFFT libraries. We conduct numerical experiments on synthetic and clinical images to highlight the improved performance of our solver over a previously existing solver that uses standard two-norm regularization for the calibration parameters. The existing solver is unable to localize the initial condition. Our new solver can localize the initial condition and recover infiltration and proliferation. 
In clinical datasets (for which the ground truth is unknown), our solver results in qualitatively different solutions compared to the two-norm regularized solver.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=ART-2020-08&engl=0}
}
 
Zum Seitenanfang