13 #ifndef MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP 14 #define MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP 42 typename InputDataType = arma::mat,
43 typename OutputDataType = arma::mat
62 template<
typename InputType,
typename TargetType>
63 double Forward(
const InputType&& input,
const TargetType&& target);
72 template<
typename InputType,
typename TargetType,
typename OutputType>
73 void Backward(
const InputType&& input,
74 const TargetType&& target,
90 template<
typename Archive>
91 void serialize(Archive& ar,
const unsigned int );
95 OutputDataType outputParameter;
105 #include "kl_divergence_impl.hpp"

void Backward(const InputType &&input, const TargetType &&target, OutputType &&output)
Ordinary feed backward pass of a neural network.
double Forward(const InputType &&input, const TargetType &&target)
Computes the Kullback–Leibler divergence error function.
The core includes that mlpack expects: standard C++ includes and Armadillo.
bool TakeMean() const
Get the value of takeMean.
OutputDataType & OutputParameter()
Modify the output parameter.
bool & TakeMean()
Modify the value of takeMean.
KLDivergence(const bool takeMean=false)
Create the Kullback–Leibler Divergence object with the specified parameters.
void serialize(Archive &ar, const unsigned int)
Serialize the loss function.
The Kullback–Leibler divergence is often used for continuous distributions (direct regression)...
OutputDataType & OutputParameter() const
Get the output parameter.