@article{zhoudx2020universality,
  author   = {Zhou, Ding-Xuan},
  title    = {Universality of deep convolutional neural networks},
  journal  = {Applied and Computational Harmonic Analysis},
  volume   = {48},
  number   = {2},
  pages    = {787--794},
  year     = {2020},
  issn     = {1063-5203},
  doi      = {10.1016/j.acha.2019.06.004},
  url      = {https://www.sciencedirect.com/science/article/pii/S1063520318302045},
  keywords = {Deep learning, Convolutional neural network, Universality, Approximation theory},
  abstract = {Deep learning has been widely applied and brought breakthroughs in speech recognition, computer vision, and many other domains. Deep neural network architectures and computational issues have been well studied in machine learning. But there lacks a theoretical foundation for understanding the approximation or generalization ability of deep learning methods generated by the network architectures such as deep convolutional neural networks. Here we show that a deep convolutional neural network (CNN) is universal, meaning that it can be used to approximate any continuous function to an arbitrary accuracy when the depth of the neural network is large enough. This answers an open question in learning theory. Our quantitative estimate, given tightly in terms of the number of free parameters to be computed, verifies the efficiency of deep CNNs in dealing with large dimensional data. Our study also demonstrates the role of convolutions in deep CNNs.},
}


@book{Galdi2011An,
  author    = {Galdi, Giovanni Paolo},
  title     = {An Introduction to the Mathematical Theory of the {Navier-Stokes} Equations},
  edition   = {Second},
  publisher = {Springer},
  address   = {New York},
  year      = {2011},
}

@misc{ACheritat,
  author = {Ch{\'e}ritat, Arnaud},
  title  = {{Mandelbrot} set},
  url    = {https://www.math.univ-toulouse.fr/~cheritat/wiki-draw/index.php/Mandelbrot_set},
  note   = {Online; accessed June 29, 2022},
}