[mlpack] 298/324: Add header comments and clean up BibTeX citation a bit.
Barak A. Pearlmutter
barak+git at cs.nuim.ie
Sun Aug 17 08:22:20 UTC 2014
This is an automated email from the git hooks/post-receive script.
bap pushed a commit to branch svn-trunk
in repository mlpack.
commit fab541936e0a9957535d0c5b8954314f1e1f8afd
Author: rcurtin <rcurtin at 9d5b8971-822b-0410-80eb-d18c1038ef23>
Date: Thu Aug 7 15:09:45 2014 +0000
Add header comments and clean up BibTeX citation a bit.
git-svn-id: http://svn.cc.gatech.edu/fastlab/mlpack/trunk@16982 9d5b8971-822b-0410-80eb-d18c1038ef23
---
src/mlpack/methods/adaboost/adaboost_impl.hpp | 100 ++++++++++++--------------
1 file changed, 45 insertions(+), 55 deletions(-)
diff --git a/src/mlpack/methods/adaboost/adaboost_impl.hpp b/src/mlpack/methods/adaboost/adaboost_impl.hpp
index a6ed804..4a9d3c0 100644
--- a/src/mlpack/methods/adaboost/adaboost_impl.hpp
+++ b/src/mlpack/methods/adaboost/adaboost_impl.hpp
@@ -2,31 +2,21 @@
* @file adaboost_impl.hpp
* @author Udit Saxena
*
- * Implementation of the Adaboost class
+ * Implementation of the Adaboost class.
*
- * @code
- * @article{Schapire:1999:IBA:337859.337870,
- * author = {Schapire, Robert E. and Singer, Yoram},
- * title = {Improved Boosting Algorithms Using Confidence-rated Predictions},
- * journal = {Mach. Learn.},
- * issue_date = {Dec. 1999},
- * volume = {37},
- * number = {3},
- * month = dec,
- * year = {1999},
- * issn = {0885-6125},
- * pages = {297--336},
- * numpages = {40},
- * url = {http://dx.doi.org/10.1023/A:1007614523901},
- * doi = {10.1023/A:1007614523901},
- * acmid = {337870},
- * publisher = {Kluwer Academic Publishers},
- * address = {Hingham, MA, USA},
- * keywords = {boosting algorithms, decision trees, multiclass classification, output coding
- * }
- * @endcode
- *
-}
+ * @code
+ * @article{schapire1999improved,
+ * author = {Schapire, Robert E. and Singer, Yoram},
+ * title = {Improved Boosting Algorithms Using Confidence-rated Predictions},
+ * journal = {Machine Learning},
+ * volume = {37},
+ * number = {3},
+ * month = dec,
+ * year = {1999},
+ * issn = {0885-6125},
+ * pages = {297--336},
+ * }
+ * @endcode
*/
#ifndef _MLPACK_METHODS_ADABOOST_ADABOOST_IMPL_HPP
@@ -38,14 +28,14 @@ namespace mlpack {
namespace adaboost {
/**
* Constructor. Currently runs the Adaboost.mh algorithm
- *
+ *
* @param data Input data
* @param labels Corresponding labels
- * @param iterations Number of boosting rounds
+ * @param iterations Number of boosting rounds
* @param other Weak Learner, which has been initialized already
*/
template<typename MatType, typename WeakLearner>
-Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
+Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
const arma::Row<size_t>& labels, int iterations, double tol,
const WeakLearner& other)
{
@@ -56,25 +46,25 @@ Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
double rt, crt, alphat = 0.0, zt;
// double tolerance = 1e-8;
// std::cout<<"Tolerance is "<<tolerance<<"\n";
- // crt is for stopping the iterations when rt
+ // crt is for stopping the iterations when rt
// stops changing by less than a tolerant value.
-
- ztAccumulator = 1.0;
-
+
+ ztAccumulator = 1.0;
+
// To be used for prediction by the Weak Learner for prediction.
arma::Row<size_t> predictedLabels(labels.n_cols);
-
+
// Use tempData to modify input Data for incorporating weights.
MatType tempData(data);
-
+
// Build the classification Matrix yt from labels
arma::mat yt(predictedLabels.n_cols, numClasses);
-
+
// Build a classification matrix of the form D(i,l)
// where i is the ith instance
// l is the lth class.
buildClassificationMatrix(yt, labels);
-
+
// ht(x), to be loaded after a round of prediction every time the weak
// learner is run, by using the buildClassificationMatrix function
arma::mat ht(predictedLabels.n_cols, numClasses);
@@ -82,16 +72,16 @@ Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
// This matrix is a helper matrix used to calculate the final hypothesis.
arma::mat sumFinalH(predictedLabels.n_cols, numClasses);
sumFinalH.fill(0.0);
-
+
// load the initial weights into a 2-D matrix
const double initWeight = (double) 1 / (data.n_cols * numClasses);
arma::mat D(data.n_cols, numClasses);
D.fill(initWeight);
-
+
// Weights are to be compressed into this rowvector
// for focussing on the perceptron weights.
arma::rowvec weights(predictedLabels.n_cols);
-
+
// This is the final hypothesis.
arma::Row<size_t> finalH(predictedLabels.n_cols);
@@ -100,21 +90,21 @@ Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
{
// std::cout<<"Run "<<i<<" times.\n";
// Initialized to zero in every round.
- rt = 0.0;
+ rt = 0.0;
zt = 0.0;
-
+
// Build the weight vectors
buildWeightMatrix(D, weights);
-
+
// call the other weak learner and train the labels.
WeakLearner w(other, tempData, weights, labels);
w.Classify(tempData, predictedLabels);
//Now from predictedLabels, build ht, the weak hypothesis
buildClassificationMatrix(ht, predictedLabels);
-
+
// Now, start calculation of alpha(t) using ht
-
+
// begin calculation of rt
for (j = 0;j < ht.n_rows; j++)
@@ -134,32 +124,32 @@ Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
alphat = 0.5 * log((1 + rt) / (1 - rt));
// end calculation of alphat
-
+
// now start modifying weights
for (j = 0;j < D.n_rows; j++)
{
for (k = 0;k < D.n_cols; k++)
- {
+ {
// we calculate zt, the normalization constant
zt += D(j,k) * exp(-1 * alphat * yt(j,k) * ht(j,k));
D(j,k) = D(j,k) * exp(-1 * alphat * yt(j,k) * ht(j,k));
- // adding to the matrix of FinalHypothesis
+ // adding to the matrix of FinalHypothesis
sumFinalH(j,k) += (alphat * ht(j,k));
}
}
-
+
// normalization of D
D = D / zt;
-
+
// Accumulating the value of zt for the Hamming Loss bound.
ztAccumulator *= zt;
}
// Iterations are over, now build a strong hypothesis
// from a weighted combination of these weak hypotheses.
-
+
arma::rowvec tempSumFinalH;
arma::uword max_index;
for (i = 0;i < sumFinalH.n_rows; i++)
@@ -172,8 +162,8 @@ Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
}
/**
- * This function helps in building a classification Matrix which is of
- * form:
+ * This function helps in building a classification Matrix which is of
+ * form:
* -1 if l is not the correct label
* 1 if l is the correct label
*
@@ -200,10 +190,10 @@ void Adaboost<MatType, WeakLearner>::buildClassificationMatrix(
/**
* This function helps in building the Weight Distribution matrix
- * which is updated during every iteration. It calculates the
- * "difficulty" in classifying a point by adding the weights for all
+ * which is updated during every iteration. It calculates the
+ * "difficulty" in classifying a point by adding the weights for all
* instances, using D.
- *
+ *
* @param D The 2 Dimensional weight matrix from which the weights are
* to be calculated.
* @param weights The output weight vector.
@@ -225,4 +215,4 @@ void Adaboost<MatType, WeakLearner>::buildWeightMatrix(
} // namespace adaboost
} // namespace mlpack
-#endif
\ No newline at end of file
+#endif
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/mlpack.git
More information about the debian-science-commits
mailing list