% Seminar presentation (not a journal article): use @misc + howpublished.
% Key added (was empty); month uses the standard unquoted macro.
@misc{tomov2020magmadnn,
  title        = {Integrating Deep Learning in Domain Science at Exascale ({MagmaDNN})},
  year         = {2020},
  month        = dec,
  howpublished = {{DOD} {HPCMP} seminar, virtual},
  abstract     = {We will present some of the current challenges in the design and integration of deep learning AI with traditional HPC simulations. We evaluate existing packages for readiness to run efficiently deep learning models and applications on large scale HPC systems, identify challenges, and propose new asynchronous parallelization and optimization techniques for current large-scale heterogeneous systems and up-coming exascale systems. These developments, along with existing HPC AI software capabilities, have been integrated in MagmaDNN, an open source HPC deep learning framework. Many deep learning frameworks are targeted towards data scientists and fall short in providing quality integration into existing HPC workflows. This paper discusses the necessities of an HPC deep learning framework and how these can be provided, e.g., as in MagmaDNN, through a deep integration with existing HPC libraries such as MAGMA and its modular memory management, MPI, CuBLAS, CuDNN, MKL, and HIP. Advancements are also illustrated through the use of algorithmic enhancements in reduced and mixed-precision and asynchronous optimization methods. Finally, we present illustrations and potential solutions on enhancing traditional compute and data intensive applications at ORNL and UTK with AI. The approaches and future challenges are illustrated on materials science, imaging, and climate applications.},
  author       = {Tomov, Stanimire and Wong, Kwai and Dongarra, Jack and Archibald, Rick and Chow, Edmond and D{\textquoteright}Azevedo, Eduardo and Eisenbach, Markus and Febbo, Rocco and Lopez, Florent and Nichols, Daniel and Yin, Junqi},
}