kl_divergence.hpp
Go to the documentation of this file.
1 
13 #ifndef MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP
14 #define MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP
15 
16 #include <mlpack/prereqs.hpp>
17 
18 namespace mlpack {
19 namespace ann {
20 
41 template <
42  typename InputDataType = arma::mat,
43  typename OutputDataType = arma::mat
44 >
46 {
47  public:
54  KLDivergence(const bool takeMean = false);
55 
62  template<typename InputType, typename TargetType>
63  double Forward(const InputType&& input, const TargetType&& target);
64 
72  template<typename InputType, typename TargetType, typename OutputType>
73  void Backward(const InputType&& input,
74  const TargetType&& target,
75  OutputType&& output);
76 
78  OutputDataType& OutputParameter() const { return outputParameter; }
80  OutputDataType& OutputParameter() { return outputParameter; }
81 
83  bool TakeMean() const { return takeMean; }
85  bool& TakeMean() { return takeMean; }
86 
90  template<typename Archive>
91  void serialize(Archive& ar, const unsigned int /* version */);
92 
93  private:
95  OutputDataType outputParameter;
96 
98  bool takeMean;
99 }; // class KLDivergence
100 
101 } // namespace ann
102 } // namespace mlpack
103 
104 // include implementation
105 #include "kl_divergence_impl.hpp"
106 
107 #endif
void Backward(const InputType &&input, const TargetType &&target, OutputType &&output)
Ordinary feed backward pass of a neural network.
double Forward(const InputType &&input, const TargetType &&target)
Computes the Kullback–Leibler divergence error function.
strip_type.hpp
Definition: add_to_po.hpp:21
The core includes that mlpack expects; standard C++ includes and Armadillo.
bool TakeMean() const
Get the value of takeMean.
OutputDataType & OutputParameter()
Modify the output parameter.
bool & TakeMean()
Modify the value of takeMean.
KLDivergence(const bool takeMean=false)
Create the Kullback–Leibler Divergence object with the specified parameters.
void serialize(Archive &ar, const unsigned int)
Serialize the loss function.
The Kullback–Leibler divergence is often used for continuous distributions (direct regression).
OutputDataType & OutputParameter() const
Get the output parameter.