13 #ifndef MLPACK_METHODS_ANN_BRNN_HPP 14 #define MLPACK_METHODS_ANN_BRNN_HPP 29 #include <ensmallen.hpp> 41 typename OutputLayerType = NegativeLogLikelihood<>,
42 typename MergeLayerType = Concat<>,
43 typename MergeOutputType = LogSoftMax<>,
44 typename InitializationRuleType = RandomInitialization,
45 typename... CustomLayers
54 InitializationRuleType,
72 BRNN(
const size_t rho,
73 const bool single =
false,
74 OutputLayerType outputLayer = OutputLayerType(),
75 MergeLayerType mergeLayer = MergeLayerType(),
76 MergeOutputType mergeOutput = MergeOutputType(),
77 InitializationRuleType initializeRule = InitializationRuleType());
102 template<
typename OptimizerType>
103 double Train(arma::cube predictors,
104 arma::cube responses,
105 OptimizerType& optimizer);
130 template<
typename OptimizerType = ens::StandardSGD>
131 double Train(arma::cube predictors, arma::cube responses);
152 void Predict(arma::cube predictors,
154 const size_t batchSize = 256);
169 double Evaluate(
const arma::mat& parameters,
171 const size_t batchSize,
172 const bool deterministic);
186 double Evaluate(
const arma::mat& parameters,
188 const size_t batchSize);
203 template<
typename GradType>
207 const size_t batchSize);
222 void Gradient(
const arma::mat& parameters,
225 const size_t batchSize);
/**
 * Add a new module to the model.
 *
 * @tparam LayerType The type of the new layer to add.
 * @tparam Args The types of the arguments to pass to the layer constructor.
 * @param args The layer parameters, forwarded to LayerType's constructor.
 */
template <class LayerType, class... Args>
void Add(Args... args);
257 const size_t&
Rho()
const {
return rho; }
259 size_t&
Rho() {
return rho; }
262 const arma::cube&
Responses()
const {
return responses; }
/**
 * Serialize the model.
 *
 * @tparam Archive The archive type (input or output).
 * @param ar The archive to serialize into or from.
 */
template<typename Archive>
void serialize(Archive& ar, const unsigned int /* version */);
//! Reset the deterministic flag of the network (internal helper; presumably
//! toggles train/test mode of the contained layers — see brnn_impl.hpp).
void ResetDeterministic();
299 OutputLayerType outputLayer;
309 InitializationRuleType initializeRule;
327 arma::cube predictors;
330 arma::cube responses;
348 std::vector<arma::mat> forwardRNNOutputParameter;
351 std::vector<arma::mat> backwardRNNOutputParameter;
369 arma::mat forwardGradient;
372 arma::mat backwardGradient;
375 arma::mat totalGradient;
378 RNN<OutputLayerType, InitializationRuleType, CustomLayers...> forwardRNN;
381 RNN<OutputLayerType, InitializationRuleType, CustomLayers...> backwardRNN;
389 namespace serialization {
391 template<
typename OutputLayerType,
392 typename InitializationRuleType,
393 typename MergeLayerType,
394 typename MergeOutputType,
397 mlpack::ann::BRNN<OutputLayerType, MergeLayerType, MergeOutputType,
398 InitializationRuleType, CustomLayer...>>
400 BOOST_STATIC_CONSTANT(
int, value = 1);
#include "brnn_impl.hpp"

DeleteVisitor executes the destructor of the instantiated object.
void ResetParameters()
Reset the module information (weights/parameters).
Set the serialization version of the BRNN class.
arma::mat & Parameters()
Modify the initial point for the optimization.
const size_t & Rho() const
Return the maximum length of backpropagation through time.
double Train(arma::cube predictors, arma::cube responses, OptimizerType &optimizer)
Train the bidirectional recurrent neural network on the given input data using the given optimizer...
This visitor is to support copy constructor for neural network module.
The core includes that mlpack expects; standard C++ includes and Armadillo.
double EvaluateWithGradient(const arma::mat &parameters, const size_t begin, GradType &gradient, const size_t batchSize)
Evaluate the bidirectional recurrent neural network with the given parameters.
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the bidirectional recurrent neural network with the given parameters...
WeightSizeVisitor returns the number of weights of the given module.
arma::cube & Responses()
Modify the matrix of responses to the input data points.
const arma::mat & Parameters() const
Return the initial point for the optimization.
Implementation of a standard recurrent neural network container.
Implementation of the base layer.
BRNN(const size_t rho, const bool single=false, OutputLayerType outputLayer=OutputLayerType(), MergeLayerType mergeLayer=MergeLayerType(), MergeOutputType mergeOutput=MergeOutputType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the BRNN object.
void Reset()
Reset the state of the network.
void Predict(arma::cube predictors, arma::cube &results, const size_t batchSize=256)
Predict the responses to a given set of predictors.
const arma::cube & Predictors() const
Get the matrix of data points (predictors).
ResetVisitor executes the Reset() function.
OutputParameterVisitor exposes the output parameter of the given module.
double Evaluate(const arma::mat &parameters, const size_t begin, const size_t batchSize, const bool deterministic)
Evaluate the bidirectional recurrent neural network with the given parameters.
void Shuffle()
Shuffle the order of function visitation.
size_t NumFunctions() const
Return the number of separable functions. (number of predictor points).
const arma::cube & Responses() const
Get the matrix of responses to the input data points.
Implementation of a standard bidirectional recurrent neural network container.
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, 
MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
void serialize(Archive &ar, const unsigned int)
Serialize the model.
arma::cube & Predictors()
Modify the matrix of data points (predictors).
DeltaVisitor exposes the delta parameter of the given module.
size_t & Rho()
Modify the maximum length of backpropagation through time.