@Article{JCM-36-881,
  author   = {Chen, Cheng and Wen, Zaiwen and Yuan, Yaxiang},
  title    = {A General Two-Level Subspace Method for Nonlinear Optimization},
  journal  = {Journal of Computational Mathematics},
  year     = {2018},
  volume   = {36},
  number   = {6},
  pages    = {881--902},
  abstract = {A new two-level subspace method is proposed for solving general unconstrained minimization formulations discretized from infinite-dimensional optimization problems. At each iteration, the algorithm executes either a direct step on the current level or a coarse subspace correction step. In the coarse subspace correction step, we augment the traditional coarse grid space with a two-dimensional subspace spanned by the coordinate direction and the gradient direction at the current point. Global convergence is proved and the convergence rate is studied under some mild conditions on the discretized functions. Preliminary numerical experiments on a few variational problems show that our two-level subspace method is promising.}
}