17 #ifndef MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_UPDATE_HPP 18 #define MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_UPDATE_HPP 23 namespace optimization {
// Tail of the AdamUpdate constructor parameter list: beta1 is the exponential
// decay rate for the first moment estimate and beta2 for the second moment
// estimate (defaults 0.9 and 0.999, matching the visible default arguments).
54 const double beta1 = 0.9,
55 const double beta2 = 0.999) :
// Initialize body: allocate the first (m) and second (v) raw-moment
// accumulators as zero matrices with the given parameter dimensions.
73 m = arma::zeros<arma::mat>(rows, cols);
74 v = arma::zeros<arma::mat>(rows, cols);
// Update step for Adam (fragment: the arma::mat& iterate parameter and the
// iteration counter increment precede these lines and are outside this view).
85 const double stepSize,
86 const arma::mat& gradient)
// Accumulate the exponential moving average of the gradient (first moment);
// presumably m was scaled by beta1 on an earlier, unseen line — confirm.
93 m += (1 - beta1) * gradient;
// Accumulate the exponential moving average of the squared gradient (second
// moment); % is Armadillo's element-wise (Schur) product.
96 v += (1 - beta2) * (gradient % gradient);
// Bias corrections counteract the zero initialization of m and v.
98 const double biasCorrection1 = 1.0 - std::pow(beta1, iteration);
99 const double biasCorrection2 = 1.0 - std::pow(beta2, iteration);
// Fused step: equivalent to stepSize * mHat / (sqrt(vHat) + epsilonHat),
// with both bias corrections folded into a single scalar factor.
106 iterate -= (stepSize * std::sqrt(biasCorrection2) / biasCorrection1) *
107 m / (arma::sqrt(v) + epsilon);
116 double Beta1()
const {
return beta1; }
121 double Beta2()
const {
return beta2; }
void Initialize(const size_t rows, const size_t cols)
The Initialize method is called by the SGD optimizer before the start of the iteration update process.
The core includes that mlpack expects: standard C++ includes and Armadillo.
AdamUpdate(const double epsilon=1e-8, const double beta1=0.9, const double beta2=0.999)
Construct the Adam update policy with the given parameters.
Adam is an optimizer that computes individual adaptive learning rates for different parameters from estimates of the first and second moments of the gradients.
double Epsilon() const
Get the value used to initialise the squared gradient parameter.
void Update(arma::mat &iterate, const double stepSize, const arma::mat &gradient)
Update step for Adam.
double Beta1() const
Get the smoothing parameter.
double & Epsilon()
Modify the value used to initialise the squared gradient parameter.
double Beta2() const
Get the second moment coefficient.
double & Beta1()
Modify the smoothing parameter.
double & Beta2()
Modify the second moment coefficient.