14 #ifndef MLPACK_CORE_OPTIMIZERS_BIGBATCH_SGD_BIGBATCH_SGD_HPP 15 #define MLPACK_CORE_OPTIMIZERS_BIGBATCH_SGD_BIGBATCH_SGD_HPP 23 namespace optimization {
92 template<
typename UpdatePolicyType = AdaptiveStepsize>
113 const double stepSize = 0.01,
114 const double batchDelta = 0.1,
115 const size_t maxIterations = 100000,
116 const double tolerance = 1e-5,
117 const bool shuffle =
true);
128 template<
typename DecomposableFunctionType>
129 double Optimize(DecomposableFunctionType&
function,
178 size_t maxIterations;
188 UpdatePolicyType updatePolicy;
198 #include "bigbatch_sgd_impl.hpp"
size_t & MaxIterations()
Modify the maximum number of iterations (0 indicates no limit).
The core includes that mlpack expects: standard C++ includes and Armadillo.
double BatchDelta() const
Get the batch delta.
size_t BatchSize() const
Get the batch size.
UpdatePolicyType UpdatePolicy() const
Get the update policy.
BigBatchSGD(const size_t batchSize=1000, const double stepSize=0.01, const double batchDelta=0.1, const size_t maxIterations=100000, const double tolerance=1e-5, const bool shuffle=true)
Construct the BigBatchSGD optimizer with the given parameters. The function to be optimized is not passed here; it is given later to Optimize().
bool & Shuffle()
Modify whether or not the individual functions are shuffled.
double StepSize() const
Get the step size.
double & StepSize()
Modify the step size.
UpdatePolicyType & UpdatePolicy()
Modify the update policy.
double Optimize(DecomposableFunctionType &function, arma::mat &iterate)
Optimize the given function using big-batch SGD.
size_t MaxIterations() const
Get the maximum number of iterations (0 indicates no limit).
Big-batch Stochastic Gradient Descent is a technique for minimizing a function which can be expressed as a sum of other (decomposable) functions, adaptively growing the batch size to control gradient variance.
double Tolerance() const
Get the tolerance for termination.
bool Shuffle() const
Get whether or not the individual functions are shuffled.
double & BatchDelta()
Modify the batch delta.
size_t & BatchSize()
Modify the batch size.
double & Tolerance()
Modify the tolerance for termination.