\section{Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$ Class Template Reference}
\label{classmlpack_1_1tree_1_1RandomForest}\index{Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$@{Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$}}
\subsection*{Public Types}
\begin{DoxyCompactItemize}
\item 
typedef \textbf{ Decision\+Tree}$<$ Fitness\+Function, Numeric\+Split\+Type, Categorical\+Split\+Type, Dimension\+Selection\+Type, Elem\+Type $>$ \textbf{ Decision\+Tree\+Type}
\begin{DoxyCompactList}\small\item\em Allow access to the underlying decision tree type. \end{DoxyCompactList}\end{DoxyCompactItemize}
\subsection*{Public Member Functions}
\begin{DoxyCompactItemize}
\item 
\textbf{ Random\+Forest} ()
\begin{DoxyCompactList}\small\item\em Construct the random forest without any training or specifying the number of trees. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\\textbf{ Random\+Forest} (const Mat\+Type \&dataset, const arma\+::\+Row$<$ size\+\_\+t $>$ \&labels, const size\+\_\+t num\+Classes, const size\+\_\+t num\+Trees=20, const size\+\_\+t minimum\+Leaf\+Size=1, const double minimum\+Gain\+Split=1e-\/7, const size\+\_\+t maximum\+Depth=0, Dimension\+Selection\+Type dimension\+Selector=\+Dimension\+Selection\+Type())
\begin{DoxyCompactList}\small\item\em Create a random forest, training on the given labeled training data with the given number of trees. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\\textbf{ Random\+Forest} (const Mat\+Type \&dataset, const \textbf{ data\+::\+Dataset\+Info} \&dataset\+Info, const arma\+::\+Row$<$ size\+\_\+t $>$ \&labels, const size\+\_\+t num\+Classes, const size\+\_\+t num\+Trees=20, const size\+\_\+t minimum\+Leaf\+Size=1, const double minimum\+Gain\+Split=1e-\/7, const size\+\_\+t maximum\+Depth=0, Dimension\+Selection\+Type dimension\+Selector=\+Dimension\+Selection\+Type())
\begin{DoxyCompactList}\small\item\em Create a random forest, training on the given labeled training data with the given dataset info and the given number of trees. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\\textbf{ Random\+Forest} (const Mat\+Type \&dataset, const arma\+::\+Row$<$ size\+\_\+t $>$ \&labels, const size\+\_\+t num\+Classes, const arma\+::rowvec \&weights, const size\+\_\+t num\+Trees=20, const size\+\_\+t minimum\+Leaf\+Size=1, const double minimum\+Gain\+Split=1e-\/7, const size\+\_\+t maximum\+Depth=0, Dimension\+Selection\+Type dimension\+Selector=\+Dimension\+Selection\+Type())
\begin{DoxyCompactList}\small\item\em Create a random forest, training on the given weighted labeled training data with the given number of trees. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\\textbf{ Random\+Forest} (const Mat\+Type \&dataset, const \textbf{ data\+::\+Dataset\+Info} \&dataset\+Info, const arma\+::\+Row$<$ size\+\_\+t $>$ \&labels, const size\+\_\+t num\+Classes, const arma\+::rowvec \&weights, const size\+\_\+t num\+Trees=20, const size\+\_\+t minimum\+Leaf\+Size=1, const double minimum\+Gain\+Split=1e-\/7, const size\+\_\+t maximum\+Depth=0, Dimension\+Selection\+Type dimension\+Selector=\+Dimension\+Selection\+Type())
\begin{DoxyCompactList}\small\item\em Create a random forest, training on the given weighted labeled training data with the given dataset info and the given number of trees. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Vec\+Type $>$ }\\size\+\_\+t \textbf{ Classify} (const Vec\+Type \&point) const
\begin{DoxyCompactList}\small\item\em Predict the class of the given point. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Vec\+Type $>$ }\\void \textbf{ Classify} (const Vec\+Type \&point, size\+\_\+t \&prediction, arma\+::vec \&probabilities) const
\begin{DoxyCompactList}\small\item\em Predict the class of the given point and return the predicted class probabilities for each class. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\void \textbf{ Classify} (const Mat\+Type \&data, arma\+::\+Row$<$ size\+\_\+t $>$ \&predictions) const
\begin{DoxyCompactList}\small\item\em Predict the classes of each point in the given dataset. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\void \textbf{ Classify} (const Mat\+Type \&data, arma\+::\+Row$<$ size\+\_\+t $>$ \&predictions, arma\+::mat \&probabilities) const
\begin{DoxyCompactList}\small\item\em Predict the classes of each point in the given dataset, also returning the predicted class probabilities for each point. \end{DoxyCompactList}\item 
size\+\_\+t \textbf{ Num\+Trees} () const
\begin{DoxyCompactList}\small\item\em Get the number of trees in the forest. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Archive $>$ }\\void \textbf{ serialize} (Archive \&ar, const unsigned int)
\begin{DoxyCompactList}\small\item\em Serialize the random forest. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\double \textbf{ Train} (const Mat\+Type \&data, const arma\+::\+Row$<$ size\+\_\+t $>$ \&labels, const size\+\_\+t num\+Classes, const size\+\_\+t num\+Trees=20, const size\+\_\+t minimum\+Leaf\+Size=1, const double minimum\+Gain\+Split=1e-\/7, const size\+\_\+t maximum\+Depth=0, Dimension\+Selection\+Type dimension\+Selector=\+Dimension\+Selection\+Type())
\begin{DoxyCompactList}\small\item\em Train the random forest on the given labeled training data with the given number of trees. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\double \textbf{ Train} (const Mat\+Type \&data, const \textbf{ data\+::\+Dataset\+Info} \&dataset\+Info, const arma\+::\+Row$<$ size\+\_\+t $>$ \&labels, const size\+\_\+t num\+Classes, const size\+\_\+t num\+Trees=20, const size\+\_\+t minimum\+Leaf\+Size=1, const double minimum\+Gain\+Split=1e-\/7, const size\+\_\+t maximum\+Depth=0, Dimension\+Selection\+Type dimension\+Selector=\+Dimension\+Selection\+Type())
\begin{DoxyCompactList}\small\item\em Train the random forest on the given labeled training data with the given dataset info and the given number of trees. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\double \textbf{ Train} (const Mat\+Type \&data, const arma\+::\+Row$<$ size\+\_\+t $>$ \&labels, const size\+\_\+t num\+Classes, const arma\+::rowvec \&weights, const size\+\_\+t num\+Trees=20, const size\+\_\+t minimum\+Leaf\+Size=1, const double minimum\+Gain\+Split=1e-\/7, const size\+\_\+t maximum\+Depth=0, Dimension\+Selection\+Type dimension\+Selector=\+Dimension\+Selection\+Type())
\begin{DoxyCompactList}\small\item\em Train the random forest on the given weighted labeled training data with the given number of trees. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type $>$ }\\double \textbf{ Train} (const Mat\+Type \&data, const \textbf{ data\+::\+Dataset\+Info} \&dataset\+Info, const arma\+::\+Row$<$ size\+\_\+t $>$ \&labels, const size\+\_\+t num\+Classes, const arma\+::rowvec \&weights, const size\+\_\+t num\+Trees=20, const size\+\_\+t minimum\+Leaf\+Size=1, const double minimum\+Gain\+Split=1e-\/7, const size\+\_\+t maximum\+Depth=0, Dimension\+Selection\+Type dimension\+Selector=\+Dimension\+Selection\+Type())
\begin{DoxyCompactList}\small\item\em Train the random forest on the given weighted labeled training data with the given dataset info and the given number of trees. \end{DoxyCompactList}\item 
const \textbf{ Decision\+Tree\+Type} \& \textbf{ Tree} (const size\+\_\+t i) const
\begin{DoxyCompactList}\small\item\em Access a tree in the forest. \end{DoxyCompactList}\item 
\textbf{ Decision\+Tree\+Type} \& \textbf{ Tree} (const size\+\_\+t i)
\begin{DoxyCompactList}\small\item\em Modify a tree in the forest (be careful!). \end{DoxyCompactList}\end{DoxyCompactItemize}


\subsection{Detailed Description}
\subsubsection*{template$<$typename Fitness\+Function = Gini\+Gain, typename Dimension\+Selection\+Type = Multiple\+Random\+Dimension\+Select, template$<$ typename $>$ class Numeric\+Split\+Type = Best\+Binary\+Numeric\+Split, template$<$ typename $>$ class Categorical\+Split\+Type = All\+Categorical\+Split, typename Elem\+Type = double$>$\newline
class mlpack\+::tree\+::\+Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$}



Definition at line 27 of file random\+\_\+forest.\+hpp.



\subsection{Member Typedef Documentation}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_a7adbdc5296f7efe79fee1b7fdb676118}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Decision\+Tree\+Type@{Decision\+Tree\+Type}}
\index{Decision\+Tree\+Type@{Decision\+Tree\+Type}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Decision\+Tree\+Type}
{\footnotesize\ttfamily typedef \textbf{ Decision\+Tree}$<$Fitness\+Function, Numeric\+Split\+Type, Categorical\+Split\+Type, Dimension\+Selection\+Type, Elem\+Type$>$ \textbf{ Decision\+Tree\+Type}}



Allow access to the underlying decision tree type. 



Definition at line 32 of file random\+\_\+forest.\+hpp.



\subsection{Constructor \& Destructor Documentation}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_a5b3e9a68306078538430d202c765511f}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Random\+Forest@{Random\+Forest}}
\index{Random\+Forest@{Random\+Forest}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Random\+Forest()\hspace{0.1cm}{\footnotesize\ttfamily [1/5]}}
{\footnotesize\ttfamily \textbf{ Random\+Forest} (\begin{DoxyParamCaption}{ }\end{DoxyParamCaption})\hspace{0.3cm}{\ttfamily [inline]}}



Construct the random forest without any training or specifying the number of trees. 

Classify() will throw an exception until \doxyref{Train()}{p.}{classmlpack_1_1tree_1_1RandomForest_a7685d43b0fe0dfdf1e0f067746eb85bb} is called. 

Definition at line 38 of file random\+\_\+forest.\+hpp.



References Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$\+::\+Classify(), and Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$\+::\+Train().

\mbox{\label{classmlpack_1_1tree_1_1RandomForest_afce52e00bdecce34f8faaea718ac3914}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Random\+Forest@{Random\+Forest}}
\index{Random\+Forest@{Random\+Forest}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Random\+Forest()\hspace{0.1cm}{\footnotesize\ttfamily [2/5]}}
{\footnotesize\ttfamily \textbf{ Random\+Forest} (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{dataset,  }\item[{const arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{labels,  }\item[{const size\+\_\+t}]{num\+Classes,  }\item[{const size\+\_\+t}]{num\+Trees = {\ttfamily 20},  }\item[{const size\+\_\+t}]{minimum\+Leaf\+Size = {\ttfamily 1},  }\item[{const double}]{minimum\+Gain\+Split = {\ttfamily 1e-\/7},  }\item[{const size\+\_\+t}]{maximum\+Depth = {\ttfamily 0},  }\item[{Dimension\+Selection\+Type}]{dimension\+Selector = {\ttfamily DimensionSelectionType()} }\end{DoxyParamCaption})}



Create a random forest, training on the given labeled training data with the given number of trees. 

The minimum\+Leaf\+Size and minimum\+Gain\+Split parameters are given to each individual decision tree during tree building. Optionally, you may specify a Dimension\+Selection\+Type to set parameters for the strategy used to choose dimensions.


\begin{DoxyParams}{Parameters}
{\em dataset} & Dataset to train on. \\
\hline
{\em labels} & Labels for dataset. \\
\hline
{\em num\+Classes} & Number of classes in dataset. \\
\hline
{\em num\+Trees} & Number of trees in the forest. \\
\hline
{\em minimum\+Leaf\+Size} & Minimum number of points in each tree\textquotesingle{}s leaf nodes. \\
\hline
{\em minimum\+Gain\+Split} & Minimum gain for splitting a decision tree node. \\
\hline
{\em maximum\+Depth} & Maximum depth for the tree. \\
\hline
{\em dimension\+Selector} & Instantiated dimension selection policy. \\
\hline
\end{DoxyParams}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_af1e8d1cbbfcc5b9beefffba9df054d16}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Random\+Forest@{Random\+Forest}}
\index{Random\+Forest@{Random\+Forest}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Random\+Forest()\hspace{0.1cm}{\footnotesize\ttfamily [3/5]}}
{\footnotesize\ttfamily \textbf{ Random\+Forest} (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{dataset,  }\item[{const \textbf{ data\+::\+Dataset\+Info} \&}]{dataset\+Info,  }\item[{const arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{labels,  }\item[{const size\+\_\+t}]{num\+Classes,  }\item[{const size\+\_\+t}]{num\+Trees = {\ttfamily 20},  }\item[{const size\+\_\+t}]{minimum\+Leaf\+Size = {\ttfamily 1},  }\item[{const double}]{minimum\+Gain\+Split = {\ttfamily 1e-\/7},  }\item[{const size\+\_\+t}]{maximum\+Depth = {\ttfamily 0},  }\item[{Dimension\+Selection\+Type}]{dimension\+Selector = {\ttfamily DimensionSelectionType()} }\end{DoxyParamCaption})}



Create a random forest, training on the given labeled training data with the given dataset info and the given number of trees. 

The minimum\+Leaf\+Size and minimum\+Gain\+Split parameters are given to each individual decision tree during tree building. Optionally, you may specify a Dimension\+Selection\+Type to set parameters for the strategy used to choose dimensions. This constructor can be used to train on categorical data.


\begin{DoxyParams}{Parameters}
{\em dataset} & Dataset to train on. \\
\hline
{\em dataset\+Info} & Dimension info for the dataset. \\
\hline
{\em labels} & Labels for dataset. \\
\hline
{\em num\+Classes} & Number of classes in dataset. \\
\hline
{\em num\+Trees} & Number of trees in the forest. \\
\hline
{\em minimum\+Leaf\+Size} & Minimum number of points in each tree\textquotesingle{}s leaf nodes. \\
\hline
{\em minimum\+Gain\+Split} & Minimum gain for splitting a decision tree node. \\
\hline
{\em maximum\+Depth} & Maximum depth for the tree. \\
\hline
{\em dimension\+Selector} & Instantiated dimension selection policy. \\
\hline
\end{DoxyParams}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_ac98132e5bf0ae00c8d5256a17f703a9b}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Random\+Forest@{Random\+Forest}}
\index{Random\+Forest@{Random\+Forest}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Random\+Forest()\hspace{0.1cm}{\footnotesize\ttfamily [4/5]}}
{\footnotesize\ttfamily \textbf{ Random\+Forest} (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{dataset,  }\item[{const arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{labels,  }\item[{const size\+\_\+t}]{num\+Classes,  }\item[{const arma\+::rowvec \&}]{weights,  }\item[{const size\+\_\+t}]{num\+Trees = {\ttfamily 20},  }\item[{const size\+\_\+t}]{minimum\+Leaf\+Size = {\ttfamily 1},  }\item[{const double}]{minimum\+Gain\+Split = {\ttfamily 1e-\/7},  }\item[{const size\+\_\+t}]{maximum\+Depth = {\ttfamily 0},  }\item[{Dimension\+Selection\+Type}]{dimension\+Selector = {\ttfamily DimensionSelectionType()} }\end{DoxyParamCaption})}



Create a random forest, training on the given weighted labeled training data with the given number of trees. 

The minimum\+Leaf\+Size and minimum\+Gain\+Split parameters are given to each individual decision tree during tree building. Optionally, you may specify a Dimension\+Selection\+Type to set parameters for the strategy used to choose dimensions.


\begin{DoxyParams}{Parameters}
{\em dataset} & Dataset to train on. \\
\hline
{\em labels} & Labels for dataset. \\
\hline
{\em num\+Classes} & Number of classes in dataset. \\
\hline
{\em weights} & Weights (importances) of each point in the dataset. \\
\hline
{\em num\+Trees} & Number of trees in the forest. \\
\hline
{\em minimum\+Leaf\+Size} & Minimum number of points in each tree\textquotesingle{}s leaf nodes. \\
\hline
{\em minimum\+Gain\+Split} & Minimum gain for splitting a decision tree node. \\
\hline
{\em maximum\+Depth} & Maximum depth for the tree. \\
\hline
{\em dimension\+Selector} & Instantiated dimension selection policy. \\
\hline
\end{DoxyParams}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_ab5bde168f40055031465da736523863c}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Random\+Forest@{Random\+Forest}}
\index{Random\+Forest@{Random\+Forest}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Random\+Forest()\hspace{0.1cm}{\footnotesize\ttfamily [5/5]}}
{\footnotesize\ttfamily \textbf{ Random\+Forest} (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{dataset,  }\item[{const \textbf{ data\+::\+Dataset\+Info} \&}]{dataset\+Info,  }\item[{const arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{labels,  }\item[{const size\+\_\+t}]{num\+Classes,  }\item[{const arma\+::rowvec \&}]{weights,  }\item[{const size\+\_\+t}]{num\+Trees = {\ttfamily 20},  }\item[{const size\+\_\+t}]{minimum\+Leaf\+Size = {\ttfamily 1},  }\item[{const double}]{minimum\+Gain\+Split = {\ttfamily 1e-\/7},  }\item[{const size\+\_\+t}]{maximum\+Depth = {\ttfamily 0},  }\item[{Dimension\+Selection\+Type}]{dimension\+Selector = {\ttfamily DimensionSelectionType()} }\end{DoxyParamCaption})}



Create a random forest, training on the given weighted labeled training data with the given dataset info and the given number of trees. 

The minimum\+Leaf\+Size and minimum\+Gain\+Split parameters are given to each individual decision tree during tree building. Optionally, you may specify a Dimension\+Selection\+Type to set parameters for the strategy used to choose dimensions. This can be used for categorical weighted training.


\begin{DoxyParams}{Parameters}
{\em dataset} & Dataset to train on. \\
\hline
{\em dataset\+Info} & Dimension info for the dataset. \\
\hline
{\em labels} & Labels for dataset. \\
\hline
{\em num\+Classes} & Number of classes in dataset. \\
\hline
{\em weights} & Weights (importances) of each point in the dataset. \\
\hline
{\em num\+Trees} & Number of trees in the forest. \\
\hline
{\em minimum\+Leaf\+Size} & Minimum number of points in each tree\textquotesingle{}s leaf nodes. \\
\hline
{\em minimum\+Gain\+Split} & Minimum gain for splitting a decision tree node. \\
\hline
{\em maximum\+Depth} & Maximum depth for the tree. \\
\hline
{\em dimension\+Selector} & Instantiated dimension selection policy. \\
\hline
\end{DoxyParams}


\subsection{Member Function Documentation}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_aa610310b354b7badd88041ca07883569}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Classify@{Classify}}
\index{Classify@{Classify}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Classify()\hspace{0.1cm}{\footnotesize\ttfamily [1/4]}}
{\footnotesize\ttfamily size\+\_\+t Classify (\begin{DoxyParamCaption}\item[{const Vec\+Type \&}]{point }\end{DoxyParamCaption}) const}



Predict the class of the given point. 

If the random forest has not been trained, this will throw an exception.


\begin{DoxyParams}{Parameters}
{\em point} & Point to be classified. \\
\hline
\end{DoxyParams}


Referenced by Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$\+::\+Random\+Forest().

\mbox{\label{classmlpack_1_1tree_1_1RandomForest_abddb69ee5b97027cd4eebd398bf39ac6}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Classify@{Classify}}
\index{Classify@{Classify}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Classify()\hspace{0.1cm}{\footnotesize\ttfamily [2/4]}}
{\footnotesize\ttfamily void Classify (\begin{DoxyParamCaption}\item[{const Vec\+Type \&}]{point,  }\item[{size\+\_\+t \&}]{prediction,  }\item[{arma\+::vec \&}]{probabilities }\end{DoxyParamCaption}) const}



Predict the class of the given point and return the predicted class probabilities for each class. 

If the random forest has not been trained, this will throw an exception.


\begin{DoxyParams}{Parameters}
{\em point} & Point to be classified. \\
\hline
{\em prediction} & size\+\_\+t to store predicted class in. \\
\hline
{\em probabilities} & Output vector of class probabilities. \\
\hline
\end{DoxyParams}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_abd3c3812715d2e249e47476782d8e95e}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Classify@{Classify}}
\index{Classify@{Classify}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Classify()\hspace{0.1cm}{\footnotesize\ttfamily [3/4]}}
{\footnotesize\ttfamily void Classify (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{data,  }\item[{arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{predictions }\end{DoxyParamCaption}) const}



Predict the classes of each point in the given dataset. 

If the random forest has not been trained, this will throw an exception.


\begin{DoxyParams}{Parameters}
{\em data} & Dataset to be classified. \\
\hline
{\em predictions} & Output predictions for each point in the dataset. \\
\hline
\end{DoxyParams}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_a255d0a8b26a14cfd6a1f745568b83017}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Classify@{Classify}}
\index{Classify@{Classify}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Classify()\hspace{0.1cm}{\footnotesize\ttfamily [4/4]}}
{\footnotesize\ttfamily void Classify (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{data,  }\item[{arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{predictions,  }\item[{arma\+::mat \&}]{probabilities }\end{DoxyParamCaption}) const}



Predict the classes of each point in the given dataset, also returning the predicted class probabilities for each point. 

If the random forest has not been trained, this will throw an exception.


\begin{DoxyParams}{Parameters}
{\em data} & Dataset to be classified. \\
\hline
{\em predictions} & Output predictions for each point in the dataset. \\
\hline
{\em probabilities} & Output matrix of class probabilities for each point. \\
\hline
\end{DoxyParams}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_ac110b5de4a11acaeb25d011dcc9e23fe}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Num\+Trees@{Num\+Trees}}
\index{Num\+Trees@{Num\+Trees}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Num\+Trees()}
{\footnotesize\ttfamily size\+\_\+t Num\+Trees (\begin{DoxyParamCaption}{ }\end{DoxyParamCaption}) const\hspace{0.3cm}{\ttfamily [inline]}}



Get the number of trees in the forest. 



Definition at line 333 of file random\+\_\+forest.\+hpp.



References Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$\+::serialize(), and Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$\+::\+Train().

\mbox{\label{classmlpack_1_1tree_1_1RandomForest_af0dd9205158ccf7bcfcd8ff81f79c927}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!serialize@{serialize}}
\index{serialize@{serialize}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{serialize()}
{\footnotesize\ttfamily void serialize (\begin{DoxyParamCaption}\item[{Archive \&}]{ar,  }\item[{const unsigned}]{int }\end{DoxyParamCaption})}



Serialize the random forest. 



Referenced by Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$\+::\+Num\+Trees().

\mbox{\label{classmlpack_1_1tree_1_1RandomForest_a7685d43b0fe0dfdf1e0f067746eb85bb}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Train@{Train}}
\index{Train@{Train}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Train()\hspace{0.1cm}{\footnotesize\ttfamily [1/4]}}
{\footnotesize\ttfamily double Train (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{data,  }\item[{const arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{labels,  }\item[{const size\+\_\+t}]{num\+Classes,  }\item[{const size\+\_\+t}]{num\+Trees = {\ttfamily 20},  }\item[{const size\+\_\+t}]{minimum\+Leaf\+Size = {\ttfamily 1},  }\item[{const double}]{minimum\+Gain\+Split = {\ttfamily 1e-\/7},  }\item[{const size\+\_\+t}]{maximum\+Depth = {\ttfamily 0},  }\item[{Dimension\+Selection\+Type}]{dimension\+Selector = {\ttfamily DimensionSelectionType()} }\end{DoxyParamCaption})}



Train the random forest on the given labeled training data with the given number of trees. 

The minimum\+Leaf\+Size and minimum\+Gain\+Split parameters are given to each individual decision tree during tree building. Optionally, you may specify a Dimension\+Selection\+Type to set parameters for the strategy used to choose dimensions.


\begin{DoxyParams}{Parameters}
{\em data} & Dataset to train on. \\
\hline
{\em labels} & Labels for dataset. \\
\hline
{\em num\+Classes} & Number of classes in dataset. \\
\hline
{\em num\+Trees} & Number of trees in the forest. \\
\hline
{\em minimum\+Leaf\+Size} & Minimum number of points in each tree\textquotesingle{}s leaf nodes. \\
\hline
{\em minimum\+Gain\+Split} & Minimum gain for splitting a decision tree node. \\
\hline
{\em maximum\+Depth} & Maximum depth for the tree. \\
\hline
{\em dimension\+Selector} & Instantiated dimension selection policy. \\
\hline
\end{DoxyParams}
\begin{DoxyReturn}{Returns}
The average entropy of all the decision trees trained in the forest. 
\end{DoxyReturn}


Referenced by Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$\+::\+Num\+Trees(), and Random\+Forest$<$ Fitness\+Function, Dimension\+Selection\+Type, Numeric\+Split\+Type, Categorical\+Split\+Type, Elem\+Type $>$\+::\+Random\+Forest().

\mbox{\label{classmlpack_1_1tree_1_1RandomForest_a44769ae5d273955734472bb65b85d091}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Train@{Train}}
\index{Train@{Train}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Train()\hspace{0.1cm}{\footnotesize\ttfamily [2/4]}}
{\footnotesize\ttfamily double Train (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{data,  }\item[{const \textbf{ data\+::\+Dataset\+Info} \&}]{dataset\+Info,  }\item[{const arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{labels,  }\item[{const size\+\_\+t}]{num\+Classes,  }\item[{const size\+\_\+t}]{num\+Trees = {\ttfamily 20},  }\item[{const size\+\_\+t}]{minimum\+Leaf\+Size = {\ttfamily 1},  }\item[{const double}]{minimum\+Gain\+Split = {\ttfamily 1e-\/7},  }\item[{const size\+\_\+t}]{maximum\+Depth = {\ttfamily 0},  }\item[{Dimension\+Selection\+Type}]{dimension\+Selector = {\ttfamily DimensionSelectionType()} }\end{DoxyParamCaption})}



Train the random forest on the given labeled training data with the given dataset info and the given number of trees. 

The minimum\+Leaf\+Size and minimum\+Gain\+Split parameters are given to each individual decision tree during tree building. Optionally, you may specify a Dimension\+Selection\+Type to set parameters for the strategy used to choose dimensions. This overload can be used to train on categorical data.


\begin{DoxyParams}{Parameters}
{\em data} & Dataset to train on. \\
\hline
{\em dataset\+Info} & Dimension info for the dataset. \\
\hline
{\em labels} & Labels for dataset. \\
\hline
{\em num\+Classes} & Number of classes in dataset. \\
\hline
{\em num\+Trees} & Number of trees in the forest. \\
\hline
{\em minimum\+Leaf\+Size} & Minimum number of points in each tree\textquotesingle{}s leaf nodes. \\
\hline
{\em minimum\+Gain\+Split} & Minimum gain for splitting a decision tree node. \\
\hline
{\em maximum\+Depth} & Maximum depth for the tree. \\
\hline
{\em dimension\+Selector} & Instantiated dimension selection policy. \\
\hline
\end{DoxyParams}
\begin{DoxyReturn}{Returns}
The average entropy of all the decision trees trained in the forest. 
\end{DoxyReturn}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_a649d980be0ba7a35aa08f468af8a4c5d}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Train@{Train}}
\index{Train@{Train}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Train()\hspace{0.1cm}{\footnotesize\ttfamily [3/4]}}
{\footnotesize\ttfamily double Train (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{data,  }\item[{const arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{labels,  }\item[{const size\+\_\+t}]{num\+Classes,  }\item[{const arma\+::rowvec \&}]{weights,  }\item[{const size\+\_\+t}]{num\+Trees = {\ttfamily 20},  }\item[{const size\+\_\+t}]{minimum\+Leaf\+Size = {\ttfamily 1},  }\item[{const double}]{minimum\+Gain\+Split = {\ttfamily 1e-\/7},  }\item[{const size\+\_\+t}]{maximum\+Depth = {\ttfamily 0},  }\item[{Dimension\+Selection\+Type}]{dimension\+Selector = {\ttfamily DimensionSelectionType()} }\end{DoxyParamCaption})}



Train the random forest on the given weighted labeled training data with the given number of trees. 

The minimum\+Leaf\+Size and minimum\+Gain\+Split parameters are given to each individual decision tree during tree building. Optionally, you may specify a Dimension\+Selection\+Type to set parameters for the strategy used to choose dimensions.


\begin{DoxyParams}{Parameters}
{\em data} & Dataset to train on. \\
\hline
{\em labels} & Labels for dataset. \\
\hline
{\em num\+Classes} & Number of classes in dataset. \\
\hline
{\em weights} & Weights (importances) of each point in the dataset. \\
\hline
{\em num\+Trees} & Number of trees in the forest. \\
\hline
{\em minimum\+Leaf\+Size} & Minimum number of points in each tree\textquotesingle{}s leaf nodes. \\
\hline
{\em minimum\+Gain\+Split} & Minimum gain for splitting a decision tree node. \\
\hline
{\em maximum\+Depth} & Maximum depth for the tree. \\
\hline
{\em dimension\+Selector} & Instantiated dimension selection policy. \\
\hline
\end{DoxyParams}
\begin{DoxyReturn}{Returns}
The average entropy of all the decision trees trained in the forest. 
\end{DoxyReturn}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_aef33d0cc91b64bc1b016654c2144cec8}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Train@{Train}}
\index{Train@{Train}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Train()\hspace{0.1cm}{\footnotesize\ttfamily [4/4]}}
{\footnotesize\ttfamily double Train (\begin{DoxyParamCaption}\item[{const Mat\+Type \&}]{data,  }\item[{const \textbf{ data\+::\+Dataset\+Info} \&}]{dataset\+Info,  }\item[{const arma\+::\+Row$<$ size\+\_\+t $>$ \&}]{labels,  }\item[{const size\+\_\+t}]{num\+Classes,  }\item[{const arma\+::rowvec \&}]{weights,  }\item[{const size\+\_\+t}]{num\+Trees = {\ttfamily 20},  }\item[{const size\+\_\+t}]{minimum\+Leaf\+Size = {\ttfamily 1},  }\item[{const double}]{minimum\+Gain\+Split = {\ttfamily 1e-\/7},  }\item[{const size\+\_\+t}]{maximum\+Depth = {\ttfamily 0},  }\item[{Dimension\+Selection\+Type}]{dimension\+Selector = {\ttfamily DimensionSelectionType()} }\end{DoxyParamCaption})}



Train the random forest on the given weighted labeled training data with the given dataset info and the given number of trees. 

The minimum\+Leaf\+Size and minimum\+Gain\+Split parameters are given to each individual decision tree during tree building. Optionally, you may specify a Dimension\+Selection\+Type to set parameters for the strategy used to choose dimensions. This overload can be used for categorical weighted training.


\begin{DoxyParams}{Parameters}
{\em data} & Dataset to train on. \\
\hline
{\em dataset\+Info} & Dimension info for the dataset. \\
\hline
{\em labels} & Labels for dataset. \\
\hline
{\em num\+Classes} & Number of classes in dataset. \\
\hline
{\em weights} & Weights (importances) of each point in the dataset. \\
\hline
{\em num\+Trees} & Number of trees in the forest. \\
\hline
{\em minimum\+Leaf\+Size} & Minimum number of points in each tree\textquotesingle{}s leaf nodes. \\
\hline
{\em minimum\+Gain\+Split} & Minimum gain for splitting a decision tree node. \\
\hline
{\em maximum\+Depth} & Maximum depth for the tree. \\
\hline
{\em dimension\+Selector} & Instantiated dimension selection policy. \\
\hline
\end{DoxyParams}
\begin{DoxyReturn}{Returns}
The average entropy of all the decision trees trained in the forest. 
\end{DoxyReturn}
\mbox{\label{classmlpack_1_1tree_1_1RandomForest_a1067c0af57c2a483b1d6db5364d03b2a}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Tree@{Tree}}
\index{Tree@{Tree}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Tree()\hspace{0.1cm}{\footnotesize\ttfamily [1/2]}}
{\footnotesize\ttfamily const \textbf{ Decision\+Tree\+Type}\& Tree (\begin{DoxyParamCaption}\item[{const size\+\_\+t}]{i }\end{DoxyParamCaption}) const\hspace{0.3cm}{\ttfamily [inline]}}



Access a tree in the forest. 



Definition at line 328 of file random\+\_\+forest.\+hpp.

\mbox{\label{classmlpack_1_1tree_1_1RandomForest_ae6046d73d87a67c78dcca6fc0450d53e}} 
\index{mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}!Tree@{Tree}}
\index{Tree@{Tree}!mlpack\+::tree\+::\+Random\+Forest@{mlpack\+::tree\+::\+Random\+Forest}}
\subsubsection{Tree()\hspace{0.1cm}{\footnotesize\ttfamily [2/2]}}
{\footnotesize\ttfamily \textbf{ Decision\+Tree\+Type}\& Tree (\begin{DoxyParamCaption}\item[{const size\+\_\+t}]{i }\end{DoxyParamCaption})\hspace{0.3cm}{\ttfamily [inline]}}



Modify a tree in the forest (be careful!). 



Definition at line 330 of file random\+\_\+forest.\+hpp.



The documentation for this class was generated from the following file\+:\begin{DoxyCompactItemize}
\item 
/var/www/mlpack.\+ratml.\+org/mlpack.\+org/\+\_\+src/mlpack-\/3.\+3.\+1/src/mlpack/methods/random\+\_\+forest/\textbf{ random\+\_\+forest.\+hpp}\end{DoxyCompactItemize}
