@Article{JML-1-60,
  author = {Yaoyu Zhang and Yuqing Li and Zhongwang Zhang and Tao Luo and Zhi-Qin John Xu},
  title = {Embedding Principle: A Hierarchical Structure of Loss Landscape of Deep Neural Networks},
  journal = {Journal of Machine Learning},
  year = {2022},
  volume = {1},
  number = {1},
  pages = {60--113},
  abstract = {

We prove a general Embedding Principle for the loss landscape of deep neural networks (NNs) that unravels a hierarchical structure of the landscape: the loss landscape of an NN contains all critical points of all narrower NNs. This result is obtained by constructing a class of critical embeddings that map any critical point of a narrower NN to a critical point of the target NN with the same output function. By discovering a wide class of general compatible critical embeddings, we provide a gross estimate of the dimension of the critical submanifolds embedded from critical points of narrower NNs. We further prove an irreversibility property of any critical embedding: the number of negative/zero/positive eigenvalues of the Hessian matrix at a critical point may increase but never decrease as the NN becomes wider through the embedding. Using a special realization of the general compatible critical embeddings, we prove a stringent necessary condition for a critical point to be ``truly-bad'', i.e., to never become a strict-saddle point through any critical embedding. This result implies that strict-saddle points are commonplace in wide NNs, which may be an important reason underlying the easy optimization of wide NNs widely observed in practice.

},
  issn = {2790-2048},
  doi = {10.4208/jml.220108},
  url = {http://global-sci.org/intro/article_detail/jml/20372.html}
}
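
A minimal sketch of the one-neuron splitting construction that the critical embeddings above generalize, in illustrative notation of our own (a one-hidden-layer network with $m$ hidden neurons, activation $\sigma$, and splitting factor $\alpha$; the paper treats general deep NNs):

$$ f_{\theta}(x) = \sum_{k=1}^{m} a_k\,\sigma\big(w_k^{\top} x\big). $$

For any fixed $\alpha \in \mathbb{R}$, split neuron $j$ by replacing $(w_j, a_j)$ with the two copies $(w_j, \alpha a_j)$ and $(w_j, (1-\alpha) a_j)$. The widened $(m+1)$-neuron network has the same output function, since
$$ \alpha a_j\,\sigma\big(w_j^{\top} x\big) + (1-\alpha)\,a_j\,\sigma\big(w_j^{\top} x\big) = a_j\,\sigma\big(w_j^{\top} x\big), $$
and the chain rule shows that each partial derivative of the wide loss at the embedded point is a multiple ($1$, $\alpha$, or $1-\alpha$) of a partial derivative of the narrow loss, hence vanishes at any critical point of the narrow NN. Varying $\alpha$ then traces out a one-parameter family of wide critical points, the simplest instance of the embedded critical submanifolds whose dimension the paper estimates.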