13 #ifndef MLPACK_CORE_OPTIMIZERS_AMS_GRAD_AMS_GRAD_UPDATE_HPP 14 #define MLPACK_CORE_OPTIMIZERS_AMS_GRAD_AMS_GRAD_UPDATE_HPP 19 namespace optimization {
// NOTE(review): fragment of the AMSGradUpdate constructor signature — the
// opening line (with the epsilon parameter) and the member-initializer list
// are outside this view; confirm against the full header.
49 const double beta1 = 0.9, // exponential decay rate for the first moment estimate
50 const double beta2 = 0.999) : // exponential decay rate for the second moment estimate
// Allocate and zero the state matrices, sized to the parameter matrix
// (rows x cols): m is the first-moment accumulator, v the second-moment
// accumulator, and vImproved the running element-wise maximum of v that
// gives AMSGrad its convergence guarantee.
// NOTE(review): only the body of Initialize(rows, cols) is visible in this
// fragment — the signature is on an elided line; confirm against the header.
68 m = arma::zeros<arma::mat>(rows, cols);
69 v = arma::zeros<arma::mat>(rows, cols);
70 vImproved = arma::zeros<arma::mat>(rows, cols);
// AMSGrad update step. NOTE(review): this is a fragment — the opening of the
// Update(iterate, stepSize, gradient) signature is on an elided line.
81 const double stepSize,
82 const arma::mat& gradient)
// Accumulate (1 - beta1) * g into the first moment estimate.
// NOTE(review): the exponential-decay steps (m *= beta1; v *= beta2;) and the
// ++iteration increment used by the bias corrections below are not visible in
// this fragment — confirm they occur on the elided lines between these
// statements.
89 m += (1 - beta1) * gradient;
// Accumulate (1 - beta2) * (g .* g) into the second raw moment estimate
// (% is Armadillo's element-wise product).
92 v += (1 - beta2) * (gradient % gradient);
// Bias corrections compensating for the zero-initialization of m and v.
94 const double biasCorrection1 = 1.0 - std::pow(beta1, iteration);
95 const double biasCorrection2 = 1.0 - std::pow(beta2, iteration);
// AMSGrad's key difference from Adam: use the element-wise maximum of all
// past second-moment estimates rather than the current estimate alone.
98 vImproved = arma::max(vImproved, v);
// Parameter step: iterate -= lr * (sqrt(bc2)/bc1) * m / (sqrt(vMax) + eps);
// epsilon guards against division by zero.
100 iterate -= (stepSize * std::sqrt(biasCorrection2) / biasCorrection1) *
101 m / (arma::sqrt(vImproved) + epsilon);
110 double Beta1()
const {
return beta1; }
115 double Beta2()
const {
return beta2; }
double & Beta1()
Modify the smoothing parameter.
double Beta2() const
Get the second moment coefficient.
double Epsilon() const
Get the value used to initialise the squared gradient parameter.
The core includes that mlpack expects: standard C++ includes and Armadillo.
double Beta1() const
Get the smoothing parameter.
AMSGradUpdate(const double epsilon = 1e-8, const double beta1 = 0.9, const double beta2 = 0.999)
Construct the AMSGrad update policy with the given parameters.
void Initialize(const size_t rows, const size_t cols)
The Initialize method is called by the SGD optimizer before the start of the iteration update process; it sizes and zeroes the moment matrices.
AMSGrad is an exponential moving average variant which, along with having the benefits of optimizers like Adam and RMSProp, also guarantees convergence: unlike Adam, it uses the maximum of past squared gradients rather than their exponential average in the update.
void Update(arma::mat &iterate, const double stepSize, const arma::mat &gradient)
Update step for AMSGrad.
double & Beta2()
Modify the second moment coefficient.
double & Epsilon()
Modify the value used to initialise the squared gradient parameter.