@article{Rahmaninia_2014,
  author   = {Rahmaninia, Maryam},
  title    = {{VSS} {SPU-EBP}: Variable Step Size Sequential Partial Update Error Back Propagation Algorithm},
  journal  = {Journal of Advanced Computer Science \& Technology},
  volume   = {3},
  number   = {1},
  pages    = {18--27},
  year     = {2014},
  month    = feb,
  doi      = {10.14419/jacst.v3i1.1753},
  url      = {https://sciencepubco.com/index.php/JACST/article/view/1753},
  keywords = {Neural Network, Error Back Propagation, MLP (Multi-Layered Perceptron), Sequential Partial Update Algorithm},
  abstract = {In MLP networks with hundreds of thousands of weights which must be trained on millions of samples, the time and space complexity may become greatly large and sometimes the training of network by EBP algorithm may be impractical. Sequential Partial Updating is an effective method to reduce computational load and power consumption in implementation. This new approach is very useful for the MLP networks with large number of weights in each layer that updating of each weight in each round of execution of EBP algorithm will be costly. Although this idea reduces computational cost and elapsed CPU time in each round but sometimes maybe increases number of epochs required to convergence and this leads to increase time of convergence. That is, to speed up more the convergence rate of the SPU-EBP algorithm, we propose a Variable Step Size (VSS) approach. In VSS SPU-EBP algorithm, we use a gradient based learning rate in SPU-EBP algorithm to speed up the convergence of training algorithm. In this method we derive an upper bound for the step size of SPU-EBP algorithm.},
}