#ifndef MLPACK_METHODS_ANN_RNN_HPP
#define MLPACK_METHODS_ANN_RNN_HPP

#include <mlpack/prereqs.hpp>

#include <ensmallen.hpp>

namespace mlpack {
namespace ann {

/**
 * Implementation of a standard recurrent neural network container.
 */
template<
    typename OutputLayerType = NegativeLogLikelihood<>,
    typename InitializationRuleType = RandomInitialization,
    typename... CustomLayers
>
class RNN
{
 public:
  //! Convenience typedef for the internal model construction.
  using NetworkType = RNN<OutputLayerType,
                          InitializationRuleType,
                          CustomLayers...>;

  //! Create the RNN object.
  RNN(const size_t rho,
      const bool single = false,
      OutputLayerType outputLayer = OutputLayerType(),
      InitializationRuleType initializeRule = InitializationRuleType());

  //! Train the network on the given data with the given optimizer.
  template<typename OptimizerType, typename... CallbackTypes>
  double Train(arma::cube predictors,
               arma::cube responses,
               OptimizerType& optimizer,
               CallbackTypes&&... callbacks);

  //! Train the network using a default-constructed optimizer.
  template<typename OptimizerType = ens::StandardSGD,
           typename... CallbackTypes>
  double Train(arma::cube predictors,
               arma::cube responses,
               CallbackTypes&&... callbacks);

  //! Predict the responses to a given set of predictors.
  void Predict(arma::cube predictors,
               arma::cube& results,
               const size_t batchSize = 256);

  //! Evaluate the network with the given parameters.
  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize,
                  const bool deterministic);

  //! Evaluate the network with the given parameters (non-deterministic mode).
  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize);

  //! Evaluate the network and compute the gradient in a single pass.
  template<typename GradType>
  double EvaluateWithGradient(const arma::mat& parameters,
                              const size_t begin,
                              GradType& gradient,
                              const size_t batchSize);

  //! Evaluate the gradient of the network with the given parameters.
  void Gradient(const arma::mat& parameters,
                const size_t begin,
                arma::mat& gradient,
                const size_t batchSize);

  //! Add a new module to the model, constructed in place.
  template<class LayerType, class... Args>
  void Add(Args... args) { network.push_back(new LayerType(args...)); }

  //! Return the maximum length of backpropagation through time.
  const size_t& Rho() const { return rho; }
  //! Modify the maximum length of backpropagation through time.
  size_t& Rho() { return rho; }

  //! Get the matrix of responses to the input data points.
  const arma::cube& Responses() const { return responses; }

  //! Serialize the model.
  template<typename Archive>
  void serialize(Archive& ar, const unsigned int /* version */);

 private:
  //! Perform the forward pass of the data in real batch mode.
  void Forward(arma::mat&& input);

  template<typename InputType>
  /* ... */

  //! Reset the deterministic parameter of the network modules.
  void ResetDeterministic();

  //! Reset the gradients of the network modules.
  void ResetGradients(arma::mat& gradient);

  //! Number of steps to backpropagate through time (BPTT).
  size_t rho;

  //! The output layer used to evaluate the network.
  OutputLayerType outputLayer;

  //! Instantiated InitializationRule object used to initialize the network.
  InitializationRuleType initializeRule;

  //! Locally-stored model modules.
  std::vector<LayerTypes<CustomLayers...> > network;

  //! The matrix of data points (predictors).
  arma::cube predictors;

  //! The matrix of responses to the input data points.
  arma::cube responses;

  //! List of all module output parameters for the backward pass (BPTT).
  std::vector<arma::mat> moduleOutputParameter;

  //! The current gradient for the gradient pass.
  arma::mat currentGradient;

  //! The BRNN class needs access to the internals of RNN.
  template<
      typename OutputLayerType1,
      typename MergeLayerType1,
      typename MergeOutputType1,
      typename InitializationRuleType1,
      typename... CustomLayers1
  >
  friend class BRNN;
};

} // namespace ann
} // namespace mlpack

namespace boost {
namespace serialization {

//! Set the serialization version of the RNN class.
template<typename OutputLayerType,
         typename InitializationRuleType,
         typename... CustomLayer>
struct version<
    mlpack::ann::RNN<OutputLayerType, InitializationRuleType, CustomLayer...>>
{
  BOOST_STATIC_CONSTANT(int, value = 1);
};

} // namespace serialization
} // namespace boost

// Include implementation.
#include "rnn_impl.hpp"

#endif

DeleteVisitor executes the destructor of the instantiated object.
RNN(const size_t rho, const bool single=false, OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the RNN object.
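A minimal construction sketch under the mlpack 3.x API; the layer choices and sizes below are illustrative assumptions, not part of the header above:

#include <mlpack/core.hpp>
#include <mlpack/methods/ann/rnn.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>

using namespace mlpack::ann;

int main()
{
  const size_t rho = 10;  // truncate backpropagation through time at 10 steps

  // single = false: the network produces a response at every time step.
  RNN<NegativeLogLikelihood<>> model(rho, false);

  // Illustrative topology: 1 input dimension, 16 LSTM units, 4 output classes.
  model.Add<Linear<>>(1, 16);
  model.Add<LSTM<>>(16, 16, rho);
  model.Add<Linear<>>(16, 4);
  model.Add<LogSoftMax<>>();
}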
void ResetParameters()
Reset the module information (weights/parameters).
Set the serialization version of the RNN class.
double Train(arma::cube predictors, arma::cube responses, OptimizerType &optimizer, CallbackTypes &&... callbacks)
Train the recurrent neural network on the given input data using the given optimizer.
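A hedged training sketch, continuing inside main() from the construction example above; the data are synthetic placeholders, and the cube layout (dimensions x sequences x time steps) follows the mlpack 3.x convention:

// 100 synthetic sequences of rho steps each, 1 input dimension.
arma::cube predictors(1, 100, rho, arma::fill::randu);
arma::cube responses(1, 100, rho);
responses.fill(1.0);  // dummy 1-indexed labels for NegativeLogLikelihood

// Overload with an explicit optimizer (plus an optional ensmallen callback).
ens::StandardSGD opt(0.01 /* stepSize */, 32 /* batchSize */, 10000 /* maxIterations */);
model.Train(predictors, responses, opt, ens::PrintLoss());

// Overload that default-constructs an ens::StandardSGD internally.
model.Train(predictors, responses);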
void Predict(arma::cube predictors, arma::cube &results, const size_t batchSize=256)
Predict the responses to a given set of predictors.
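Continuing the sketch: results is populated with one output per sequence per time step; the test data here are again synthetic.

arma::cube testData(1, 5, rho, arma::fill::randu);  // 5 unseen sequences
arma::cube results;
model.Predict(testData, results);  // batchSize defaults to 256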
void serialize(Archive &ar, const unsigned int)
Serialize the model.
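serialize() is rarely called directly; a hedged sketch of round-tripping the model through mlpack's data::Save() and data::Load(), which drive boost::serialization underneath:

// Save under the name "rnn_model"; the last argument disables fatal errors.
mlpack::data::Save("rnn.xml", "rnn_model", model, false);

// Load into a freshly constructed network of the same type.
RNN<NegativeLogLikelihood<>> loaded(rho);
mlpack::data::Load("rnn.xml", "rnn_model", loaded);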
The core includes that mlpack expects: standard C++ includes and Armadillo.
void Reset()
Reset the state of the network.
size_t & Rho()
Modify the maximum length of backpropagation through time.
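The paired overloads give read and write access to the BPTT truncation length, e.g.:

const size_t current = model.Rho();  // read the current truncation length
model.Rho() = 20;                    // widen the BPTT window to 20 steps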
WeightSizeVisitor returns the number of weights of the given module.
const arma::mat & Parameters() const
Return the initial point for the optimization.
arma::mat & Parameters()
Modify the initial point for the optimization.
double Evaluate(const arma::mat &parameters, const size_t begin, const size_t batchSize, const bool deterministic)
Evaluate the recurrent neural network with the given parameters.
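Evaluate() is the separable objective function that ensmallen optimizers drive during Train(); a hedged sketch of calling it directly on the first 32 sequences of a trained model:

const double loss = model.Evaluate(model.Parameters(),
                                   0,      // begin: index of the first sequence
                                   32,     // batchSize: number of sequences
                                   true);  // deterministic: disable e.g. dropout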
Implementation of a standard recurrent neural network container.
Implementation of the base layer.
arma::cube & Predictors()
Modify the matrix of data points (predictors).
ResetVisitor executes the Reset() function.
OutputParameterVisitor exposes the output parameter of the given module.
const arma::cube & Predictors() const
Get the matrix of data points (predictors).
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the recurrent neural network with the given parameters, and with respect to only a number of points in the dataset.
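A matching sketch for the gradient over the same 32-sequence window; the gradient matrix is pre-sized to the parameter matrix:

arma::mat gradient(model.Parameters().n_rows, model.Parameters().n_cols,
                   arma::fill::zeros);
model.Gradient(model.Parameters(), 0, gradient, 32);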
Implementation of a standard bidirectional recurrent neural network container.
double EvaluateWithGradient(const arma::mat &parameters, const size_t begin, GradType &gradient, const size_t batchSize)
Evaluate the recurrent neural network with the given parameters, computing the gradient as a side effect.
typedef boost::variant<
    Add<arma::mat, arma::mat>*,
    AddMerge<arma::mat, arma::mat>*,
    AtrousConvolution<NaiveConvolution<ValidConvolution>,
        NaiveConvolution<FullConvolution>,
        NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
    BaseLayer<LogisticFunction, arma::mat, arma::mat>*,
    BaseLayer<IdentityFunction, arma::mat, arma::mat>*,
    BaseLayer<TanhFunction, arma::mat, arma::mat>*,
    BaseLayer<RectifierFunction, arma::mat, arma::mat>*,
    BaseLayer<SoftplusFunction, arma::mat, arma::mat>*,
    BatchNorm<arma::mat, arma::mat>*,
    BilinearInterpolation<arma::mat, arma::mat>*,
    Concat<arma::mat, arma::mat>*,
    Concatenate<arma::mat, arma::mat>*,
    ConcatPerformance<NegativeLogLikelihood<arma::mat, arma::mat>,
        arma::mat, arma::mat>*,
    Constant<arma::mat, arma::mat>*,
    Convolution<NaiveConvolution<ValidConvolution>,
        NaiveConvolution<FullConvolution>,
        NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
    TransposedConvolution<NaiveConvolution<ValidConvolution>,
        NaiveConvolution<ValidConvolution>,
        NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
    DropConnect<arma::mat, arma::mat>*,
    Dropout<arma::mat, arma::mat>*,
    AlphaDropout<arma::mat, arma::mat>*,
    ELU<arma::mat, arma::mat>*,
    FlexibleReLU<arma::mat, arma::mat>*,
    Glimpse<arma::mat, arma::mat>*,
    HardTanH<arma::mat, arma::mat>*,
    Highway<arma::mat, arma::mat>*,
    Join<arma::mat, arma::mat>*,
    LayerNorm<arma::mat, arma::mat>*,
    LeakyReLU<arma::mat, arma::mat>*,
    CReLU<arma::mat, arma::mat>*,
    Linear<arma::mat, arma::mat, NoRegularizer>*,
    LinearNoBias<arma::mat, arma::mat, NoRegularizer>*,
    LogSoftMax<arma::mat, arma::mat>*,
    Lookup<arma::mat, arma::mat>*,
    LSTM<arma::mat, arma::mat>*,
    GRU<arma::mat, arma::mat>*,
    FastLSTM<arma::mat, arma::mat>*,
    MaxPooling<arma::mat, arma::mat>*,
    MeanPooling<arma::mat, arma::mat>*,
    MiniBatchDiscrimination<arma::mat, arma::mat>*,
    MultiplyConstant<arma::mat, arma::mat>*,
    MultiplyMerge<arma::mat, arma::mat>*,
    NegativeLogLikelihood<arma::mat, arma::mat>*,
    Padding<arma::mat, arma::mat>*,
    PReLU<arma::mat, arma::mat>*,
    WeightNorm<arma::mat, arma::mat>*,
    MoreTypes,
    CustomLayers*...
> LayerTypes;
DeltaVisitor exposes the delta parameter of the given module.
const size_t & Rho() const
Return the maximum length of backpropagation through time.
void Add(LayerTypes<CustomLayers...> layer)
Add a new module to the model.
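This overload accepts an already-constructed layer rather than building one in place; a hedged sketch, assuming the pointer converts into the LayerTypes variant and the network takes ownership (releasing it later via DeleteVisitor):

model.Add(new Linear<>(16, 4));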
arma::cube & Responses()
Modify the matrix of responses to the input data points.
~RNN()
Destructor to release allocated memory.
const arma::cube & Responses() const
Get the matrix of responses to the input data points.
void Shuffle()
Shuffle the order of function visitation.