@inproceedings{vemuri2024functional,
  title     = {Functional Tensor Decompositions for Physics-Informed Neural Networks},
  author    = {Sai Karthikeya Vemuri and Tim Büchner and Julia Niebling and Joachim Denzler},
  booktitle = {International Conference on Pattern Recognition (ICPR)},
  year      = {2024},
  code      = {https://github.com/cvjena/TensorDecompositions4PINNs},
  doi       = {10.48550/arXiv.2408.13101},
  url       = {https://arxiv.org/abs/2408.13101},
  note      = {(accepted at ICPR)},
  langid    = {english},
  abstract  = {Physics-Informed Neural Networks (PINNs) have shown continuous promise in approximating partial differential equations (PDEs), although they remain constrained by the curse of dimensionality. In this paper, we propose a generalized PINN version of the classical variable separable method. To do this, we first show that, using the universal approximation theorem, a multivariate function can be approximated by the outer product of neural networks whose inputs are separated variables. We leverage tensor decomposition forms to separate the variables in a PINN setting. By employing Canonical Polyadic (CP), Tensor-Train (TT), and Tucker decomposition forms within the PINN framework, we create robust architectures for learning multivariate functions from separate neural networks connected by outer products. Our methodology significantly enhances the performance of PINNs, as evidenced by improved results on complex high-dimensional PDEs, including the 3D Helmholtz and 5D Poisson equations, among others. This research underscores the potential of tensor decomposition-based variably separated PINNs to surpass the state-of-the-art, offering a compelling solution to the dimensionality challenge in PDE approximation.},
}
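
The abstract's central construction admits a compact illustration: a multivariate function is approximated as a sum of outer products of per-variable neural networks, the CP decomposition form. Below is a minimal sketch of that ansatz, not the authors' implementation (their code is at the repository linked above); the class name, network widths, and rank are illustrative assumptions, and a PDE residual loss would be added on top via autograd.

```python
# Hypothetical sketch of a CP-form separable ansatz as described in the
# abstract: u(x_1, ..., x_d) ~ sum_r f_r^(1)(x_1) * ... * f_r^(d)(x_d).
# Names and hyperparameters are illustrative, not from the paper's code.
import torch
import torch.nn as nn

class CPSeparableAnsatz(nn.Module):
    """One small MLP per input variable; each maps a scalar coordinate
    to R features, and the outputs are combined by elementwise product
    and summed over the rank index (CP decomposition form)."""
    def __init__(self, dim: int = 3, rank: int = 16, width: int = 32):
        super().__init__()
        self.nets = nn.ModuleList(
            nn.Sequential(
                nn.Linear(1, width), nn.Tanh(),
                nn.Linear(width, width), nn.Tanh(),
                nn.Linear(width, rank),
            )
            for _ in range(dim)
        )

    def forward(self, coords: torch.Tensor) -> torch.Tensor:
        # coords: (N, dim); each coordinate goes to its own network.
        factors = [net(coords[:, d:d + 1]) for d, net in enumerate(self.nets)]
        prod = factors[0]
        for f in factors[1:]:
            prod = prod * f          # elementwise product over the rank axis
        return prod.sum(dim=1, keepdim=True)  # contract the rank index

# Example: evaluate the ansatz on random 3D points; for a PINN, the
# Helmholtz or Poisson residual would be differentiated through this.
model = CPSeparableAnsatz(dim=3, rank=16)
u = model(torch.rand(128, 3))
print(u.shape)  # torch.Size([128, 1])
```

TT and Tucker variants differ only in how the per-variable factor outputs are contracted (a chain of core tensors for TT, a single core tensor for Tucker) rather than the elementwise product and sum used here.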