ffn.hpp
Go to the documentation of this file.
1 
13 #ifndef MLPACK_METHODS_ANN_FFN_HPP
14 #define MLPACK_METHODS_ANN_FFN_HPP
15 
16 #include <mlpack/prereqs.hpp>
17 
25 #include "visitor/copy_visitor.hpp"
26 #include "visitor/loss_visitor.hpp"
27 
29 
33 #include <ensmallen.hpp>
34 
35 namespace mlpack {
36 namespace ann {
37 
// Implementation of a standard feed forward network (FFN).
//
// @tparam OutputLayerType Output layer type used to evaluate the network
//     (defaults to NegativeLogLikelihood<>).
// @tparam InitializationRuleType Rule used to initialize the weight matrix
//     (defaults to RandomInitialization).
// @tparam CustomLayers Any additional user-defined layer types the network
//     may contain.
46 template<
47  typename OutputLayerType = NegativeLogLikelihood<>,
48  typename InitializationRuleType = RandomInitialization,
49  typename... CustomLayers
50 >
51 class FFN
52 {
53  public:
56 
 // Create the FFN object; both the output layer and the initialization
 // rule are optional and default-constructed if omitted.
70  FFN(OutputLayerType outputLayer = OutputLayerType(),
71  InitializationRuleType initializeRule = InitializationRuleType());
72 
 // Copy constructor.
74  FFN(const FFN&);
75 
 // Move constructor.
77  FFN(FFN&&);
78 
 // NOTE(review): the documentation index also lists `FFN& operator=(FFN)`
 // (copy/move assignment), but its declaration is missing from this
 // listing.
81 
 // Destructor to release allocated memory.
83  ~FFN();
84 
 // Train the feedforward network on the given input data using the given
 // optimizer; callbacks are forwarded to the optimizer. Returns the final
 // objective value.
105  template<typename OptimizerType, typename... CallbackTypes>
106  double Train(arma::mat predictors,
107  arma::mat responses,
108  OptimizerType& optimizer,
109  CallbackTypes&&... callbacks);
110 
 // Train the feedforward network on the given input data, constructing a
 // default optimizer (ens::RMSProp) internally.
131  template<typename OptimizerType = ens::RMSProp, typename... CallbackTypes>
132  double Train(arma::mat predictors,
133  arma::mat responses,
134  CallbackTypes&&... callbacks);
135 
 // Predict the responses to a given set of predictors; the predictions
 // are written into `results`.
147  void Predict(arma::mat predictors, arma::mat& results);
148 
 // Evaluate the feedforward network with the given predictors and
 // responses; returns the objective value.
156  double Evaluate(arma::mat predictors, arma::mat responses);
157 
 // Evaluate the feedforward network with the given parameter matrix,
 // using the stored predictors/responses.
166  double Evaluate(const arma::mat& parameters);
167 
 // Evaluate the network on the batch of `batchSize` points starting at
 // index `begin`. `deterministic` presumably toggles deterministic
 // (evaluation) mode on stochastic layers — see ResetDeterministic();
 // exact semantics are not visible in this listing.
181  double Evaluate(const arma::mat& parameters,
182  const size_t begin,
183  const size_t batchSize,
184  const bool deterministic);
185 
 // Batch evaluation overload without the explicit deterministic flag.
198  double Evaluate(const arma::mat& parameters,
199  const size_t begin,
200  const size_t batchSize);
201 
 // Evaluate the feedforward network with the given parameters and also
 // compute the gradient into `gradient`.
210  template<typename GradType>
211  double EvaluateWithGradient(const arma::mat& parameters, GradType& gradient);
212 
 // Batched variant of EvaluateWithGradient: evaluates the `batchSize`
 // points starting at index `begin`.
225  template<typename GradType>
226  double EvaluateWithGradient(const arma::mat& parameters,
227  const size_t begin,
228  GradType& gradient,
229  const size_t batchSize);
230 
 // Evaluate the gradient of the feedforward network with the given
 // parameters, with respect to the batch of `batchSize` points starting
 // at index `begin`.
243  void Gradient(const arma::mat& parameters,
244  const size_t begin,
245  arma::mat& gradient,
246  const size_t batchSize);
247 
 // Shuffle the order of function visitation.
252  void Shuffle();
253 
254  /*
255  * Add a new module to the model.
256  *
257  * @param args The layer parameter.
258  */
259  template <class LayerType, class... Args>
260  void Add(Args... args) { network.push_back(new LayerType(args...)); }
261 
262  /*
263  * Add a new module to the model.
264  *
265  * @param layer The Layer to be added to the model.
266  */
267  void Add(LayerTypes<CustomLayers...> layer) { network.push_back(layer); }
268 
 // Get the network model.
270  const std::vector<LayerTypes<CustomLayers...> >& Model() const
271  {
272  return network;
273  }
 // Modify the network model.
277  std::vector<LayerTypes<CustomLayers...> >& Model() { return network; }
278 
 // Return the number of separable functions (the number of predictor
 // points).
280  size_t NumFunctions() const { return numFunctions; }
281 
 // Return / modify the initial point for the optimization (the network
 // parameter matrix).
283  const arma::mat& Parameters() const { return parameter; }
285  arma::mat& Parameters() { return parameter; }
286 
 // Get / modify the matrix of responses to the input data points.
288  const arma::mat& Responses() const { return responses; }
290  arma::mat& Responses() { return responses; }
291 
 // Get / modify the matrix of data points (predictors).
293  const arma::mat& Predictors() const { return predictors; }
295  arma::mat& Predictors() { return predictors; }
296 
 // Reset the module information (weights/parameters).
300  void ResetParameters();
301 
 // Serialize the model.
303  template<typename Archive>
304  void serialize(Archive& ar, const unsigned int /* version */);
305 
 // Perform the forward pass of the data in real batch mode; the network
 // output is written into `results`.
316  void Forward(arma::mat inputs, arma::mat& results);
317 
 // Partial forward pass: `begin` and `end` presumably select the layer
 // range to evaluate — TODO confirm against the implementation.
329  void Forward(arma::mat inputs,
330  arma::mat& results,
331  const size_t begin,
332  const size_t end);
333 
 // Perform the backward pass of the data in real batch mode; returns the
 // loss for the given targets and fills `gradients`.
345  double Backward(arma::mat targets, arma::mat& gradients);
346 
347  private:
348  // Helper functions.
 // Internal forward pass over the stored network state.
355  void Forward(arma::mat&& input);
356 
 // Replace the stored predictors and responses with the given data.
364  void ResetData(arma::mat predictors, arma::mat responses);
365 
 // Internal backward pass over the stored network state.
370  void Backward();
371 
 // Compute the gradient for the given input (internal helper).
376  void Gradient(arma::mat&& input);
377 
 // Reset the deterministic flag — presumably propagated to each layer;
 // exact behavior is not visible in this listing.
382  void ResetDeterministic();
383 
 // Reset the gradient state for the network using the given matrix.
387  void ResetGradients(arma::mat& gradient);
388 
 // Swap the contents of this network with another (helper for the
 // assignment operator).
394  void Swap(FFN& network);
395 
 // Instantiated output layer used to evaluate the network.
397  OutputLayerType outputLayer;
398 
 // Instantiated rule used to initialize the weights.
401  InitializationRuleType initializeRule;
402 
 // Cached input width — presumably maintained via outputWidthVisitor;
 // confirm against the implementation.
404  size_t width;
405 
 // Cached input height — presumably maintained via outputHeightVisitor;
 // confirm against the implementation.
407  size_t height;
408 
 // Flag — presumably records whether the parameters have been set up
 // (see ResetParameters()); confirm against the implementation.
410  bool reset;
411 
 // The layers making up the network model (owned pointers held in a
 // boost::variant; released via deleteVisitor).
413  std::vector<LayerTypes<CustomLayers...> > network;
414 
 // The matrix of data points (predictors).
416  arma::mat predictors;
417 
 // The matrix of responses to the input data points.
419  arma::mat responses;
420 
 // The initial point for the optimization (network parameters).
422  arma::mat parameter;
423 
 // The number of separable functions (the number of predictor points).
425  size_t numFunctions;
426 
 // Working matrix — presumably the error propagated from the output
 // layer during the backward pass; confirm against the implementation.
428  arma::mat error;
429 
 // The input of the current forward pass — TODO confirm.
431  arma::mat currentInput;
432 
 // DeltaVisitor exposes the delta parameter of the given module.
434  DeltaVisitor deltaVisitor;
435 
 // OutputParameterVisitor exposes the output parameter of the given
 // module.
437  OutputParameterVisitor outputParameterVisitor;
438 
 // WeightSizeVisitor returns the number of weights of the given module.
440  WeightSizeVisitor weightSizeVisitor;
441 
 // OutputWidthVisitor exposes the OutputWidth() method of the given
 // module.
443  OutputWidthVisitor outputWidthVisitor;
444 
 // OutputHeightVisitor exposes the OutputHeight() method of the given
 // module.
446  OutputHeightVisitor outputHeightVisitor;
447 
 // LossVisitor exposes the Loss() method of the given module.
449  LossVisitor lossVisitor;
450 
 // ResetVisitor executes the Reset() function.
452  ResetVisitor resetVisitor;
453 
 // DeleteVisitor executes the destructor of the instantiated object.
455  DeleteVisitor deleteVisitor;
456 
 // Deterministic-mode flag (see ResetDeterministic()).
458  bool deterministic;
459 
 // Working matrices used during the forward/backward passes; their exact
 // roles are not visible in this listing.
461  arma::mat delta;
462 
464  arma::mat inputParameter;
465 
467  arma::mat outputParameter;
468 
470  arma::mat gradient;
471 
 // CopyVisitor supports the copy constructor for network modules.
473  CopyVisitor<CustomLayers...> copyVisitor;
474 
475  // The GAN class should have access to internal members.
476  template<
477  typename Model,
478  typename InitializerType,
479  typename NoiseType,
480  typename PolicyType
481  >
482  friend class GAN;
483 }; // class FFN
484 
485 } // namespace ann
486 } // namespace mlpack
487 
490 namespace boost {
491 namespace serialization {
492 
// Set the boost::serialization version of the FFN class to 1, so archives
// written by this class carry an explicit version number.
493 template<typename OutputLayerType,
494  typename InitializationRuleType,
495  typename... CustomLayer>
496 struct version<
497  mlpack::ann::FFN<OutputLayerType, InitializationRuleType, CustomLayer...>>
498 {
499  BOOST_STATIC_CONSTANT(int, value = 1);
500 };
501 
502 } // namespace serialization
503 } // namespace boost
504 
505 // Include implementation.
506 #include "ffn_impl.hpp"
507 
508 #endif
std::vector< LayerTypes< CustomLayers... > > & Model()
Modify the network model.
Definition: ffn.hpp:277
DeleteVisitor executes the destructor of the instantiated object.
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the feedforward network with the given parameters, and with respect to only the batch of points starting at index begin.
OutputHeightVisitor exposes the OutputHeight() method of the given module.
arma::mat & Responses()
Modify the matrix of responses to the input data points.
Definition: ffn.hpp:290
void serialize(Archive &ar, const unsigned int)
Serialize the model.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
Definition: ffn.hpp:280
void Predict(arma::mat predictors, arma::mat &results)
Predict the responses to a given set of predictors.
Set the serialization version of the adaboost class.
Definition: adaboost.hpp:194
strip_type.hpp
Definition: add_to_po.hpp:21
LossVisitor exposes the Loss() method of the given module.
This visitor is to support copy constructor for neural network module.
void Add(Args... args)
Definition: ffn.hpp:260
const arma::mat & Predictors() const
Get the matrix of data points (predictors).
Definition: ffn.hpp:293
The core includes that mlpack expects; standard C++ includes and Armadillo.
WeightSizeVisitor returns the number of weights of the given module.
void Forward(arma::mat inputs, arma::mat &results)
Perform the forward pass of the data in real batch mode.
FFN & operator=(FFN)
Copy/move assignment operator.
void Shuffle()
Shuffle the order of function visitation.
~FFN()
Destructor to release allocated memory.
Implementation of the base layer.
Definition: base_layer.hpp:53
const arma::mat & Responses() const
Get the matrix of responses to the input data points.
Definition: ffn.hpp:288
ResetVisitor executes the Reset() function.
double EvaluateWithGradient(const arma::mat &parameters, GradType &gradient)
Evaluate the feedforward network with the given parameters.
OutputParameterVisitor exposes the output parameter of the given module.
void Add(LayerTypes< CustomLayers... > layer)
Definition: ffn.hpp:267
arma::mat & Parameters()
Modify the initial point for the optimization.
Definition: ffn.hpp:285
void ResetParameters()
Reset the module information (weights/parameters).
arma::mat & Predictors()
Modify the matrix of data points (predictors).
Definition: ffn.hpp:295
const arma::mat & Parameters() const
Return the initial point for the optimization.
Definition: ffn.hpp:283
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, 
MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
DeltaVisitor exposes the delta parameter of the given module.
The implementation of the standard GAN module.
Definition: gan.hpp:63
const std::vector< LayerTypes< CustomLayers... > > & Model() const
Get the network model.
Definition: ffn.hpp:270
Implementation of a standard feed forward network.
Definition: ffn.hpp:51
OutputWidthVisitor exposes the OutputWidth() method of the given module.
double Backward(arma::mat targets, arma::mat &gradients)
Perform the backward pass of the data in real batch mode.
double Evaluate(arma::mat predictors, arma::mat responses)
Evaluate the feedforward network with the given predictors and responses.
FFN(OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the FFN object.
double Train(arma::mat predictors, arma::mat responses, OptimizerType &optimizer, CallbackTypes &&... callbacks)
Train the feedforward network on the given input data using the given optimizer.