13 #ifndef MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP 14 #define MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP 42 typename InputDataType = arma::mat,
43 typename OutputDataType = arma::mat
62 template<
typename InputType,
typename TargetType>
63 double Forward(
const InputType&& input,
const TargetType&& target);
72 template<
typename InputType,
typename TargetType,
typename OutputType>
73 void Backward(
const InputType&& input,
74 const TargetType&& target,
88 OutputDataType&
Delta()
const {
return delta; }
90 OutputDataType&
Delta() {
return delta; }
100 template<
typename Archive>
101 void serialize(Archive& ar,
const unsigned int );
105 OutputDataType delta;
108 InputDataType inputParameter;
111 OutputDataType outputParameter;
121 #include "kl_divergence_impl.hpp"
void Backward(const InputType &&input, const TargetType &&target, OutputType &&output)
Ordinary feed backward pass of a neural network.
double Forward(const InputType &&input, const TargetType &&target)
Computes the Kullback–Leibler divergence error function.
OutputDataType & Delta()
Modify the delta.
The core includes that mlpack expects: standard C++ includes and Armadillo.
bool TakeMean() const
Get the value of takeMean.
OutputDataType & OutputParameter()
Modify the output parameter.
bool & TakeMean()
Modify the value of takeMean.
InputDataType & InputParameter() const
Get the input parameter.
KLDivergence(const bool takeMean=false)
Create the Kullback–Leibler Divergence object with the specified parameters.
OutputDataType & Delta() const
Get the delta.
void serialize(Archive &ar, const unsigned int)
Serialize the loss function.
InputDataType & InputParameter()
Modify the input parameter.
The Kullback–Leibler divergence is often used for continuous distributions (direct regression)...
OutputDataType & OutputParameter() const
Get the output parameter.