13 #ifndef MLPACK_METHODS_ANN_FFN_HPP 14 #define MLPACK_METHODS_ANN_FFN_HPP 33 #include <ensmallen.hpp> 47 typename OutputLayerType = NegativeLogLikelihood<>,
48 typename InitializationRuleType = RandomInitialization,
49 typename... CustomLayers
70 FFN(OutputLayerType outputLayer = OutputLayerType(),
71 InitializationRuleType initializeRule = InitializationRuleType());
102 template<
typename OptimizerType>
103 double Train(arma::mat predictors,
105 OptimizerType& optimizer);
124 template<
typename OptimizerType = ens::RMSProp>
125 double Train(arma::mat predictors, arma::mat responses);
138 void Predict(arma::mat predictors, arma::mat& results);
147 double Evaluate(arma::mat predictors, arma::mat responses);
157 double Evaluate(
const arma::mat& parameters);
172 double Evaluate(
const arma::mat& parameters,
174 const size_t batchSize,
175 const bool deterministic);
189 double Evaluate(
const arma::mat& parameters,
191 const size_t batchSize);
201 template<
typename GradType>
216 template<
typename GradType>
220 const size_t batchSize);
234 void Gradient(
const arma::mat& parameters,
237 const size_t batchSize);
250 template <
class LayerType,
class... Args>
251 void Add(Args... args) { network.push_back(
new LayerType(args...)); }
269 const arma::mat&
Responses()
const {
return responses; }
284 template<
typename Archive>
285 void serialize(Archive& ar,
const unsigned int );
297 void Forward(arma::mat inputs, arma::mat& results);
326 double Backward(arma::mat targets, arma::mat& gradients);
336 void Forward(arma::mat&& input);
345 void ResetData(arma::mat predictors, arma::mat responses);
363 void ResetDeterministic();
368 void ResetGradients(arma::mat& gradient);
375 void Swap(
FFN& network);
378 OutputLayerType outputLayer;
382 InitializationRuleType initializeRule;
394 std::vector<
LayerTypes<CustomLayers...> > network;
397 arma::mat predictors;
412 arma::mat currentInput;
445 arma::mat inputParameter;
448 arma::mat outputParameter;
459 typename InitializerType,
472 namespace serialization {
474 template<
typename OutputLayerType,
475 typename InitializationRuleType,
478 mlpack::ann::FFN<OutputLayerType, InitializationRuleType, CustomLayer...>>
480 BOOST_STATIC_CONSTANT(
int, value = 1);
487 #include "ffn_impl.hpp" DeleteVisitor executes the destructor of the instantiated object.
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the feedforward network with the given parameters, and with respect to only ...
OutputHeightVisitor exposes the OutputHeight() method of the given module.
arma::mat & Responses()
Modify the matrix of responses to the input data points.
void serialize(Archive &ar, const unsigned int)
Serialize the model.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
void Predict(arma::mat predictors, arma::mat &results)
Predict the responses to a given set of predictors.
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat > *, LinearNoBias< arma::mat, arma::mat > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, 
Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, Reparametrization< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat, false > *, Sequential< arma::mat, arma::mat, true > *, Subview< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, CustomLayers *... > LayerTypes
Set the serialization version of the FFN class.
LossVisitor exposes the Loss() method of the given module.
This visitor is to support copy constructor for neural network module.
const arma::mat & Predictors() const
Get the matrix of data points (predictors).
The core includes that mlpack expects; standard C++ includes and Armadillo.
double Train(arma::mat predictors, arma::mat responses, OptimizerType &optimizer)
Train the feedforward network on the given input data using the given optimizer.
WeightSizeVisitor returns the number of weights of the given module.
void Forward(arma::mat inputs, arma::mat &results)
Perform the forward pass of the data in real batch mode.
FFN & operator=(FFN)
Copy/move assignment operator.
void Shuffle()
Shuffle the order of function visitation.
~FFN()
Destructor to release allocated memory.
Implementation of the base layer.
const arma::mat & Responses() const
Get the matrix of responses to the input data points.
ResetVisitor executes the Reset() function.
double EvaluateWithGradient(const arma::mat &parameters, GradType &gradient)
Evaluate the feedforward network with the given parameters.
OutputParameterVisitor exposes the output parameter of the given module.
void Add(LayerTypes< CustomLayers... > layer)
arma::mat & Parameters()
Modify the initial point for the optimization.
void ResetParameters()
Reset the module information (weights/parameters).
arma::mat & Predictors()
Modify the matrix of data points (predictors).
const arma::mat & Parameters() const
Return the initial point for the optimization.
DeltaVisitor exposes the delta parameter of the given module.
Implementation of a standard feed forward network.
OutputWidthVisitor exposes the OutputWidth() method of the given module.
double Backward(arma::mat targets, arma::mat &gradients)
Perform the backward pass of the data in real batch mode.
double Evaluate(arma::mat predictors, arma::mat responses)
Evaluate the feedforward network with the given predictors and responses.
FFN(OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the FFN object.