13 #ifndef MLPACK_METHODS_ANN_FFN_HPP 14 #define MLPACK_METHODS_ANN_FFN_HPP 33 #include <ensmallen.hpp> 47 typename OutputLayerType = NegativeLogLikelihood<>,
48 typename InitializationRuleType = RandomInitialization,
49 typename... CustomLayers
70 FFN(OutputLayerType outputLayer = OutputLayerType(),
71 InitializationRuleType initializeRule = InitializationRuleType());
105 template<
typename OptimizerType,
typename... CallbackTypes>
106 double Train(arma::mat predictors,
108 OptimizerType& optimizer,
109 CallbackTypes&&... callbacks);
131 template<
typename OptimizerType = ens::RMSProp,
typename... CallbackTypes>
132 double Train(arma::mat predictors,
134 CallbackTypes&&... callbacks);
147 void Predict(arma::mat predictors, arma::mat& results);
156 double Evaluate(arma::mat predictors, arma::mat responses);
166 double Evaluate(
const arma::mat& parameters);
181 double Evaluate(
const arma::mat& parameters,
183 const size_t batchSize,
184 const bool deterministic);
198 double Evaluate(
const arma::mat& parameters,
200 const size_t batchSize);
210 template<
typename GradType>
225 template<
typename GradType>
229 const size_t batchSize);
243 void Gradient(
const arma::mat& parameters,
246 const size_t batchSize);
259 template <
class LayerType,
class... Args>
260 void Add(Args... args) { network.push_back(
new LayerType(args...)); }
288 const arma::mat&
Responses()
const {
return responses; }
303 template<
typename Archive>
304 void serialize(Archive& ar,
const unsigned int );
316 void Forward(arma::mat inputs, arma::mat& results);
345 double Backward(arma::mat targets, arma::mat& gradients);
355 void Forward(arma::mat&& input);
364 void ResetData(arma::mat predictors, arma::mat responses);
382 void ResetDeterministic();
387 void ResetGradients(arma::mat& gradient);
394 void Swap(
FFN& network);
397 OutputLayerType outputLayer;
401 InitializationRuleType initializeRule;
413 std::vector<
LayerTypes<CustomLayers...> > network;
416 arma::mat predictors;
431 arma::mat currentInput;
464 arma::mat inputParameter;
467 arma::mat outputParameter;
478 typename InitializerType,
491 namespace serialization {
493 template<
typename OutputLayerType,
494 typename InitializationRuleType,
497 mlpack::ann::FFN<OutputLayerType, InitializationRuleType, CustomLayer...>>
499 BOOST_STATIC_CONSTANT(
int, value = 1);
506 #include "ffn_impl.hpp"
std::vector< LayerTypes< CustomLayers... > > & Model()
Modify the network model.
DeleteVisitor executes the destructor of the instantiated object.
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the feedforward network with the given parameters, and with respect to only ...
OutputHeightVisitor exposes the OutputHeight() method of the given module.
arma::mat & Responses()
Modify the matrix of responses to the input data points.
void serialize(Archive &ar, const unsigned int)
Serialize the model.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
void Predict(arma::mat predictors, arma::mat &results)
Predict the responses to a given set of predictors.
Set the serialization version of the FFN class.
LossVisitor exposes the Loss() method of the given module.
This visitor is to support copy constructor for neural network module.
const arma::mat & Predictors() const
Get the matrix of data points (predictors).
The core includes that mlpack expects; standard C++ includes and Armadillo.
WeightSizeVisitor returns the number of weights of the given module.
void Forward(arma::mat inputs, arma::mat &results)
Perform the forward pass of the data in real batch mode.
FFN & operator=(FFN)
Copy/move assignment operator.
void Shuffle()
Shuffle the order of function visitation.
~FFN()
Destructor to release allocated memory.
Implementation of the base layer.
const arma::mat & Responses() const
Get the matrix of responses to the input data points.
ResetVisitor executes the Reset() function.
double EvaluateWithGradient(const arma::mat &parameters, GradType &gradient)
Evaluate the feedforward network with the given parameters.
OutputParameterVisitor exposes the output parameter of the given module.
void Add(LayerTypes< CustomLayers... > layer)
arma::mat & Parameters()
Modify the initial point for the optimization.
void ResetParameters()
Reset the module information (weights/parameters).
arma::mat & Predictors()
Modify the matrix of data points (predictors).
const arma::mat & Parameters() const
Return the initial point for the optimization.
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, 
MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
DeltaVisitor exposes the delta parameter of the given module.
The implementation of the standard GAN module.
const std::vector< LayerTypes< CustomLayers... > > & Model() const
Get the network model.
Implementation of a standard feed forward network.
OutputWidthVisitor exposes the OutputWidth() method of the given module.
double Backward(arma::mat targets, arma::mat &gradients)
Perform the backward pass of the data in real batch mode.
double Evaluate(arma::mat predictors, arma::mat responses)
Evaluate the feedforward network with the given predictors and responses.
FFN(OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the FFN object.
double Train(arma::mat predictors, arma::mat responses, OptimizerType &optimizer, CallbackTypes &&... callbacks)
Train the feedforward network on the given input data using the given optimizer.