ffn.hpp — declaration of mlpack's feed forward network (FFN) class.
This is the Doxygen source listing for the file; follow the links in the rendered page for the full documentation.
1 
13 #ifndef MLPACK_METHODS_ANN_FFN_HPP
14 #define MLPACK_METHODS_ANN_FFN_HPP
15 
16 #include <mlpack/prereqs.hpp>
17 
25 #include "visitor/copy_visitor.hpp"
26 #include "visitor/loss_visitor.hpp"
27 
29 
33 #include <ensmallen.hpp>
34 
35 namespace mlpack {
36 namespace ann {
37 
46 template<
47  typename OutputLayerType = NegativeLogLikelihood<>,
48  typename InitializationRuleType = RandomInitialization,
49  typename... CustomLayers
50 >
// Implementation of a standard feed forward network (FFN).
//
// @tparam OutputLayerType Output layer used to evaluate the network
//     (defaults to NegativeLogLikelihood<>).
// @tparam InitializationRuleType Rule used to initialize the parameter
//     matrix (defaults to RandomInitialization).
// @tparam CustomLayers Additional user-defined layer types available via
//     the LayerTypes variant.
51 class FFN
52 {
53  public:
56 
// Create the FFN object with the given output layer and weight
// initialization rule.
70  FFN(OutputLayerType outputLayer = OutputLayerType(),
71  InitializationRuleType initializeRule = InitializationRuleType());
72 
// Copy constructor.
74  FFN(const FFN&);
75 
// Move constructor.
77  FFN(FFN&&);
78 
81 
// Destructor to release allocated memory (layers are deleted via
// deleteVisitor below).
83  ~FFN();
84 
// Train the feedforward network on the given input data using the given
// optimizer; callbacks are forwarded to ensmallen. Returns the final
// objective value.
105  template<typename OptimizerType, typename... CallbackTypes>
106  double Train(arma::mat predictors,
107  arma::mat responses,
108  OptimizerType& optimizer,
109  CallbackTypes&&... callbacks);
110 
// Train with a default-constructed optimizer (ens::RMSProp by default).
131  template<typename OptimizerType = ens::RMSProp, typename... CallbackTypes>
132  double Train(arma::mat predictors,
133  arma::mat responses,
134  CallbackTypes&&... callbacks);
135 
// Predict the responses to a given set of predictors; output is written
// into `results`.
147  void Predict(arma::mat predictors, arma::mat& results);
148 
// Evaluate the feedforward network with the given predictors and
// responses; returns the objective value.
156  double Evaluate(arma::mat predictors, arma::mat responses);
157 
// Evaluate the network with the given parameters (ensmallen function API).
166  double Evaluate(const arma::mat& parameters);
167 
// Evaluate the network on the batch of `batchSize` points starting at
// index `begin`. `deterministic` presumably disables stochastic layer
// behavior during evaluation — confirm in ffn_impl.hpp.
181  double Evaluate(const arma::mat& parameters,
182  const size_t begin,
183  const size_t batchSize,
184  const bool deterministic);
185 
// Batch-evaluation overload without the explicit deterministic flag.
198  double Evaluate(const arma::mat& parameters,
199  const size_t begin,
200  const size_t batchSize);
201 
// Evaluate the network and compute its gradient in a single pass;
// returns the objective value.
210  template<typename GradType>
211  double EvaluateWithGradient(const arma::mat& parameters, GradType& gradient);
212 
// Batch variant of EvaluateWithGradient for `batchSize` points starting
// at `begin`.
225  template<typename GradType>
226  double EvaluateWithGradient(const arma::mat& parameters,
227  const size_t begin,
228  GradType& gradient,
229  const size_t batchSize);
230 
// Evaluate the gradient of the network with the given parameters, with
// respect to only the batch of points starting at `begin`.
243  void Gradient(const arma::mat& parameters,
244  const size_t begin,
245  arma::mat& gradient,
246  const size_t batchSize);
247 
// Shuffle the order of function visitation (ensmallen separable-function
// API requirement).
252  void Shuffle();
253 
254  /*
255  * Add a new module to the model.
256  *
257  * @param args The layer parameter.
258  */
259  template <class LayerType, class... Args>
260  void Add(Args... args) { network.push_back(new LayerType(args...)); }
261 
262  /*
263  * Add a new module to the model.
264  *
265  * @param layer The Layer to be added to the model.
266  */
267  void Add(LayerTypes<CustomLayers...> layer) { network.push_back(layer); }
268 
// Get the network model (the ordered sequence of layers).
270  const std::vector<LayerTypes<CustomLayers...> >& Model() const
271  {
272  return network;
273  }
// Modify the network model.
275  std::vector<LayerTypes<CustomLayers...> >& Model() { return network; }
276 
// Return the number of separable functions (the number of predictor
// points).
278  size_t NumFunctions() const { return numFunctions; }
279 
// Return the initial point for the optimization (the parameter matrix).
281  const arma::mat& Parameters() const { return parameter; }
// Modify the initial point for the optimization.
283  arma::mat& Parameters() { return parameter; }
284 
// Get the matrix of responses to the input data points.
286  const arma::mat& Responses() const { return responses; }
// Modify the matrix of responses to the input data points.
288  arma::mat& Responses() { return responses; }
289 
// Get the matrix of data points (predictors).
291  const arma::mat& Predictors() const { return predictors; }
// Modify the matrix of data points (predictors).
293  arma::mat& Predictors() { return predictors; }
294 
// Reset the module information (weights/parameters).
298  void ResetParameters();
299 
// Serialize the model (boost::serialization).
301  template<typename Archive>
302  void serialize(Archive& ar, const unsigned int /* version */);
303 
// Perform the forward pass of the data in real batch mode.
314  void Forward(arma::mat inputs, arma::mat& results);
315 
// Partial forward pass running only the layers between `begin` and `end`.
// NOTE(review): whether the bounds are inclusive is defined in
// ffn_impl.hpp — confirm before relying on it.
327  void Forward(arma::mat inputs,
328  arma::mat& results,
329  const size_t begin,
330  const size_t end);
331 
// Perform the backward pass of the data in real batch mode; gradients
// are written into `gradients` and the loss is returned.
343  double Backward(arma::mat targets, arma::mat& gradients);
344 
345  private:
346  // Helper functions.
// Internal forward-pass helper.
353  void Forward(arma::mat&& input);
354 
// Replace the stored training data with the given predictors/responses.
362  void ResetData(arma::mat predictors, arma::mat responses);
363 
// Internal backward-pass helper.
368  void Backward();
369 
// Internal gradient-computation helper.
374  void Gradient(arma::mat&& input);
375 
// Reset the deterministic flag across the network's layers.
380  void ResetDeterministic();
381 
// Reset the gradient storage of all layers into `gradient`.
385  void ResetGradients(arma::mat& gradient);
386 
// Swap this network's contents with another (supports the copy-and-swap
// assignment operator).
392  void Swap(FFN& network);
393 
// Instantiated output layer used to evaluate the network.
395  OutputLayerType outputLayer;
396 
// Instantiated rule used to initialize the parameter matrix.
399  InitializationRuleType initializeRule;
400 
// Input width — presumably for image-shaped inputs; TODO confirm.
402  size_t width;
403 
// Input height — presumably for image-shaped inputs; TODO confirm.
405  size_t height;
406 
// Whether the parameters have been set up; exact semantics defined in
// ffn_impl.hpp — confirm there.
408  bool reset;
409 
// The ordered sequence of network modules (boost::variant of layer
// pointers; see LayerTypes).
411  std::vector<LayerTypes<CustomLayers...> > network;
412 
// The matrix of data points (predictors).
414  arma::mat predictors;
415 
// The matrix of responses to the input data points.
417  arma::mat responses;
418 
// The network's parameter matrix (initial point for the optimization).
420  arma::mat parameter;
421 
// The number of separable functions (the number of predictor points).
423  size_t numFunctions;
424 
// Error storage — presumably the output layer's backpropagated error;
// TODO confirm against ffn_impl.hpp.
426  arma::mat error;
427 
// Locally stored copy of the current input.
429  arma::mat currentInput;
430 
// DeltaVisitor exposes the delta parameter of the given module.
432  DeltaVisitor deltaVisitor;
433 
// OutputParameterVisitor exposes the output parameter of the given module.
435  OutputParameterVisitor outputParameterVisitor;
436 
// WeightSizeVisitor returns the number of weights of the given module.
438  WeightSizeVisitor weightSizeVisitor;
439 
// OutputWidthVisitor exposes the OutputWidth() method of the given module.
441  OutputWidthVisitor outputWidthVisitor;
442 
// OutputHeightVisitor exposes the OutputHeight() method of the given module.
444  OutputHeightVisitor outputHeightVisitor;
445 
// LossVisitor exposes the Loss() method of the given module.
447  LossVisitor lossVisitor;
448 
// ResetVisitor executes the Reset() function.
450  ResetVisitor resetVisitor;
451 
// DeleteVisitor executes the destructor of the instantiated object.
453  DeleteVisitor deleteVisitor;
454 
// Whether the network currently runs in deterministic (evaluation) mode;
// NOTE(review): toggled via ResetDeterministic() — confirm semantics in
// ffn_impl.hpp.
456  bool deterministic;
457 
// Locally stored delta matrix — presumably backpropagated error; TODO
// confirm.
459  arma::mat delta;
460 
// Locally stored input parameter matrix.
462  arma::mat inputParameter;
463 
// Locally stored output parameter matrix.
465  arma::mat outputParameter;
466 
// Locally stored gradient matrix.
468  arma::mat gradient;
469 
// Visitor supporting the copy constructor for neural network modules.
471  CopyVisitor<CustomLayers...> copyVisitor;
472 
473  // The GAN class should have access to internal members.
474  template<
475  typename Model,
476  typename InitializerType,
477  typename NoiseType,
478  typename PolicyType
479  >
480  friend class GAN;
481 }; // class FFN
482 
483 } // namespace ann
484 } // namespace mlpack
485 
488 namespace boost {
489 namespace serialization {
490 
// Set the boost::serialization class version of FFN to 1 so that
// serialized archives can evolve compatibly across releases.
491 template<typename OutputLayerType,
492  typename InitializationRuleType,
493  typename... CustomLayer>
494 struct version<
495  mlpack::ann::FFN<OutputLayerType, InitializationRuleType, CustomLayer...>>
496 {
497  BOOST_STATIC_CONSTANT(int, value = 1);
498 };
499 
500 } // namespace serialization
501 } // namespace boost
502 
503 // Include implementation.
504 #include "ffn_impl.hpp"
505 
506 #endif
std::vector< LayerTypes< CustomLayers... > > & Model()
Modify the network model.
Definition: ffn.hpp:275
DeleteVisitor executes the destructor of the instantiated object.
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the feedforward network with the given parameters, and with respect to only ...
OutputHeightVisitor exposes the OutputHeight() method of the given module.
arma::mat & Responses()
Modify the matrix of responses to the input data points.
Definition: ffn.hpp:288
void serialize(Archive &ar, const unsigned int)
Serialize the model.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
Definition: ffn.hpp:278
void Predict(arma::mat predictors, arma::mat &results)
Predict the responses to a given set of predictors.
Set the serialization version of the adaboost class.
Definition: adaboost.hpp:180
.hpp
Definition: add_to_po.hpp:21
LossVisitor exposes the Loss() method of the given module.
This visitor is to support copy constructor for neural network module.
void Add(Args... args)
Definition: ffn.hpp:260
const arma::mat & Predictors() const
Get the matrix of data points (predictors).
Definition: ffn.hpp:291
The core includes that mlpack expects; standard C++ includes and Armadillo.
WeightSizeVisitor returns the number of weights of the given module.
void Forward(arma::mat inputs, arma::mat &results)
Perform the forward pass of the data in real batch mode.
FFN & operator=(FFN)
Copy/move assignment operator.
void Shuffle()
Shuffle the order of function visitation.
~FFN()
Destructor to release allocated memory.
Implementation of the base layer.
Definition: base_layer.hpp:49
const arma::mat & Responses() const
Get the matrix of responses to the input data points.
Definition: ffn.hpp:286
ResetVisitor executes the Reset() function.
double EvaluateWithGradient(const arma::mat &parameters, GradType &gradient)
Evaluate the feedforward network with the given parameters.
OutputParameterVisitor exposes the output parameter of the given module.
void Add(LayerTypes< CustomLayers... > layer)
Definition: ffn.hpp:267
arma::mat & Parameters()
Modify the initial point for the optimization.
Definition: ffn.hpp:283
void ResetParameters()
Reset the module information (weights/parameters).
arma::mat & Predictors()
Modify the matrix of data points (predictors).
Definition: ffn.hpp:293
const arma::mat & Parameters() const
Return the initial point for the optimization.
Definition: ffn.hpp:281
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, 
MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
DeltaVisitor exposes the delta parameter of the given module.
The implementation of the standard GAN module.
Definition: gan.hpp:63
const std::vector< LayerTypes< CustomLayers... > > & Model() const
Get the network model.
Definition: ffn.hpp:270
Implementation of a standard feed forward network.
Definition: ffn.hpp:51
OutputWidthVisitor exposes the OutputWidth() method of the given module.
double Backward(arma::mat targets, arma::mat &gradients)
Perform the backward pass of the data in real batch mode.
double Evaluate(arma::mat predictors, arma::mat responses)
Evaluate the feedforward network with the given predictors and responses.
FFN(OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the FFN object.
double Train(arma::mat predictors, arma::mat responses, OptimizerType &optimizer, CallbackTypes &&... callbacks)
Train the feedforward network on the given input data using the given optimizer.