@article{aydin_general_multifidelity_2019,
  author   = {Aydin, R. C. and Braeu, F. A. and Cyron, C. J.},
  title    = {General multi-fidelity framework for training artificial neural networks with computational models},
  journal  = {Frontiers in Materials},
  year     = {2019},
  volume   = {6},
  pages    = {61},
  doi      = {10.3389/fmats.2019.00061},
  abstract = {Training of artificial neural networks (ANNs) relies on the availability of training data. If ANNs have to be trained to predict or control the behavior of complex physical systems, often not enough real-world training data are available, for example, because experiments or measurements are too expensive, time-consuming, or dangerous. In this case, generating training data by way of realistic computational simulations is a viable and often the only promising alternative. Doing so can, however, be associated with a significant and often even prohibitive computational cost, which forms a serious bottleneck for the application of machine learning to complex physical systems. To overcome this problem, we propose in this paper an approach that is both systematic and general. It uses cheap low-fidelity computational models to start the training of the ANN and gradually switches to higher-fidelity training data as the training of the ANN progresses. We demonstrate the benefits of this strategy using examples from structural and materials mechanics. We demonstrate that in these examples the multi-fidelity strategy introduced herein can reduce the total computational cost – compared to simple brute-force training of ANNs – by a half up to one order of magnitude. This multi-fidelity strategy can thus be hoped to become a powerful and versatile tool for the future combination of computational simulations and artificial intelligence, in particular in areas such as structural and materials mechanics.}
}