Designing a scientific software stack to meet the needs of the
next-generation of mesh-based simulation demands, not only scalable and
efficient mesh and data management on a wide range of platforms, but also an
abstraction layer that makes it useful for a wide range of application codes.
Common utility tasks, such as file I/O, mesh distribution, and work
partitioning, should be delegated to external libraries in order to promote
code re-use, extensibility and software interoperability. In this paper we
demonstrate the use of PETSc's DMPlex data management API to perform mesh input
and domain partitioning in Fluidity, a large scale CFD application. We
demonstrate that raising the level of abstraction adds new functionality to the
application code, such as support for additional mesh file formats and mesh re-
ordering, while improving simulation startup cost through more efficient mesh
distribution. Moreover, the separation of concerns accomplished through this
interface shifts critical performance and interoperability issues, such as
scalable I/O and file format support, to a widely used and supported open
source community library, improving the sustainability, performance, and
functionality of Fluidity.
%0 Generic
%1 citeulike:13987209
%A Lange, Michael
%A Knepley, Matthew G.
%A Gorman, Gerard J.
%D 2015
%J arXiv Computer Science > Mathematical Software
%K exodusii, firedrake, gmsh, petsc 65n50-pdes-bvps-mesh-generation-and-refinement 65-01-numerical-analysis-instructional-exposition 68p05-data-structures 68p20-information-storage-and-retrieval 68n30-mathematical-aspects-of-software-engineering 55u05-abstract-complexes
%N 1505.04633
%T Flexible, Scalable Mesh and Data Management using PETSc DMPlex
%U http://arxiv.org/abs/1505.04633
%X Designing a scientific software stack to meet the needs of the
next-generation of mesh-based simulation demands, not only scalable and
efficient mesh and data management on a wide range of platforms, but also an
abstraction layer that makes it useful for a wide range of application codes.
Common utility tasks, such as file I/O, mesh distribution, and work
partitioning, should be delegated to external libraries in order to promote
code re-use, extensibility and software interoperability. In this paper we
demonstrate the use of PETSc's DMPlex data management API to perform mesh input
and domain partitioning in Fluidity, a large scale CFD application. We
demonstrate that raising the level of abstraction adds new functionality to the
application code, such as support for additional mesh file formats and mesh re-
ordering, while improving simulation startup cost through more efficient mesh
distribution. Moreover, the separation of concerns accomplished through this
interface shifts critical performance and interoperability issues, such as
scalable I/O and file format support, to a widely used and supported open
source community library, improving the sustainability, performance, and
functionality of Fluidity.
%7 1
@misc{citeulike:13987209,
  author        = {Lange, Michael and Knepley, Matthew G. and Gorman, Gerard J.},
  title         = {Flexible, Scalable Mesh and Data Management using {PETSc} {DMPlex}},
  year          = 2015,
  month         = may,
  day           = 18,
  eprint        = {1505.04633},
  archiveprefix = {arXiv},
  primaryclass  = {cs.MS},
  version       = {1},
  url           = {https://arxiv.org/abs/1505.04633},
  abstract      = {Designing a scientific software stack to meet the needs of the
next-generation of mesh-based simulation demands, not only scalable and
efficient mesh and data management on a wide range of platforms, but also an
abstraction layer that makes it useful for a wide range of application codes.
Common utility tasks, such as file I/O, mesh distribution, and work
partitioning, should be delegated to external libraries in order to promote
code re-use, extensibility and software interoperability. In this paper we
demonstrate the use of PETSc's DMPlex data management API to perform mesh input
and domain partitioning in Fluidity, a large scale CFD application. We
demonstrate that raising the level of abstraction adds new functionality to the
application code, such as support for additional mesh file formats and mesh
re-ordering, while improving simulation startup cost through more efficient mesh
distribution. Moreover, the separation of concerns accomplished through this
interface shifts critical performance and interoperability issues, such as
scalable I/O and file format support, to a widely used and supported open
source community library, improving the sustainability, performance, and
functionality of Fluidity.},
  keywords      = {exodusii, firedrake, gmsh, petsc, 65n50-pdes-bvps-mesh-generation-and-refinement 65-01-numerical-analysis-instructional-exposition 68p05-data-structures 68p20-information-storage-and-retrieval 68n30-mathematical-aspects-of-software-engineering 55u05-abstract-complexes},
  file          = {lange_15_flexible_1060383.pdf},
  added-at      = {2017-06-29T07:13:07.000+0200},
  posted-at     = {2016-03-24 11:35:06},
  timestamp     = {2023-11-06T23:45:00.000+0100},
  priority      = {0},
  biburl        = {https://www.bibsonomy.org/bibtex/28af3fdbc94870eb85683aac25934bd87/gdmcbain},
  interhash     = {53556d5a66a24f6a0ab0300913ab4d18},
  intrahash     = {8af3fdbc94870eb85683aac25934bd87},
  citeulike-article-id = {13987209},
  citeulike-attachment-1 = {lange_15_flexible_1060383.pdf; /pdf/user/gdmcbain/article/13987209/1060383/lange_15_flexible_1060383.pdf; f09094908a7c34e56ab819cbc46291b37a70e4c6},
  citeulike-linkout-0 = {https://arxiv.org/abs/1505.04633},
  citeulike-linkout-1 = {https://arxiv.org/pdf/1505.04633}
}