- Journal Home
- Volume 43 - 2025
- Volume 42 - 2024
- Volume 41 - 2023
- Volume 40 - 2022
- Volume 39 - 2021
- Volume 38 - 2020
- Volume 37 - 2019
- Volume 36 - 2018
- Volume 35 - 2017
- Volume 34 - 2016
- Volume 33 - 2015
- Volume 32 - 2014
- Volume 31 - 2013
- Volume 30 - 2012
- Volume 29 - 2011
- Volume 28 - 2010
- Volume 27 - 2009
- Volume 26 - 2008
- Volume 25 - 2007
- Volume 24 - 2006
- Volume 23 - 2005
- Volume 22 - 2004
- Volume 21 - 2003
- Volume 20 - 2002
- Volume 19 - 2001
- Volume 18 - 2000
- Volume 17 - 1999
- Volume 16 - 1998
- Volume 15 - 1997
- Volume 14 - 1996
- Volume 13 - 1995
- Volume 12 - 1994
- Volume 11 - 1993
- Volume 10 - 1992
- Volume 9 - 1991
- Volume 8 - 1990
- Volume 7 - 1989
- Volume 6 - 1988
- Volume 5 - 1987
- Volume 4 - 1986
- Volume 3 - 1985
- Volume 2 - 1984
- Volume 1 - 1983
Convergence of Gradient Method with Momentum for Back-Propagation Neural Networks
Cited by
Export citation
- BibTex
- RIS
- TXT
@article{JCM-26-613,
  author   = {Wu, Wei and Zhang, Naimin and Li, Zhengxue and Li, Long and Liu, Yan},
  title    = {Convergence of Gradient Method with Momentum for Back-Propagation Neural Networks},
  journal  = {Journal of Computational Mathematics},
  year     = {2008},
  volume   = {26},
  number   = {4},
  pages    = {613--623},
  issn     = {1991-7139},
  keywords = {Back-propagation (BP) neural networks, Gradient method, Momentum, Convergence},
  url      = {http://global-sci.org/intro/article_detail/jcm/8645.html},
  abstract = {In this work, a gradient method with momentum for BP neural networks is considered. The momentum coefficient is chosen in an adaptive manner to accelerate and stabilize the learning procedure of the network weights. Corresponding convergence results are proved.},
}
TY  - JOUR
T1  - Convergence of Gradient Method with Momentum for Back-Propagation Neural Networks
AU  - Wu, Wei
AU  - Zhang, Naimin
AU  - Li, Zhengxue
AU  - Li, Long
AU  - Liu, Yan
JO  - Journal of Computational Mathematics
VL  - 26
IS  - 4
SP  - 613
EP  - 623
PY  - 2008
DA  - 2008/08
SN  - 1991-7139
UR  - https://global-sci.org/intro/article_detail/jcm/8645.html
KW  - Back-propagation (BP) neural networks
KW  - Gradient method
KW  - Momentum
KW  - Convergence
AB  - In this work, a gradient method with momentum for BP neural networks is considered. The momentum coefficient is chosen in an adaptive manner to accelerate and stabilize the learning procedure of the network weights. Corresponding convergence results are proved.
ER  - 
Wei Wu, Naimin Zhang, Zhengxue Li, Long Li and Yan Liu. (2008). Convergence of Gradient Method with Momentum for Back-Propagation Neural Networks.
Journal of Computational Mathematics. 26 (4).
613-623.
Copy to clipboard