[mlpack] 229/324: New tests added for adaboost.
Barak A. Pearlmutter
barak+git at cs.nuim.ie
Sun Aug 17 08:22:13 UTC 2014
This is an automated email from the git hooks/post-receive script.
bap pushed a commit to branch svn-trunk
in repository mlpack.
commit 2b32e08a4f34678d14135903f70f00d262de960d
Author: saxena.udit <saxena.udit at 9d5b8971-822b-0410-80eb-d18c1038ef23>
Date: Sun Jul 27 19:37:49 2014 +0000
New tests added for adaboost.
git-svn-id: http://svn.cc.gatech.edu/fastlab/mlpack/trunk@16875 9d5b8971-822b-0410-80eb-d18c1038ef23
---
src/mlpack/methods/adaboost/adaboost_impl.hpp | 18 +-
src/mlpack/tests/adaboost_test.cpp | 134 ++++++++++-
src/mlpack/tests/data/vc2.txt | 310 ++++++++++++++++++++++++++
src/mlpack/tests/data/vc2_labels.txt | 310 ++++++++++++++++++++++++++
4 files changed, 769 insertions(+), 3 deletions(-)
diff --git a/src/mlpack/methods/adaboost/adaboost_impl.hpp b/src/mlpack/methods/adaboost/adaboost_impl.hpp
index b39d229..3d0d663 100644
--- a/src/mlpack/methods/adaboost/adaboost_impl.hpp
+++ b/src/mlpack/methods/adaboost/adaboost_impl.hpp
@@ -53,7 +53,11 @@ Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
size_t numClasses = (arma::max(labels) - arma::min(labels)) + 1;
int i, j, k;
- double rt, alphat = 0.0, zt;
+ double rt, crt, alphat = 0.0, zt;
+ double tolerance = 1e-20;
+ // crt holds the previous round's rt; the boosting rounds stop early
+ // once rt changes by less than the tolerance value.
ztAccumulator = 1.0;
@@ -94,6 +98,7 @@ Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
// now start the boosting rounds
for (i = 0; i < iterations; i++)
{
// Initialized to zero in every round.
rt = 0.0;
zt = 0.0;
@@ -118,6 +123,17 @@ Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
rt += (D(j,k) * yt(j,k) * ht(j,k));
}
// end calculation of rt
+ // std::cout<<"Value of rt is: "<<rt<<"\n";
+
+ if (i > 0)
+ {
+ if ( (rt - crt) < tolerance)
+ {
+ // std::cout<<(rt-crt)<<"\n";
+ i = iterations;
+ }
+ }
+ crt = rt;
alphat = 0.5 * log((1 + rt) / (1 - rt));
// end calculation of alphat
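For readers following along, here is a minimal standalone sketch of the
tolerance-based early stopping added above. The program and its rtSequence
values are illustrative assumptions, not mlpack code; only the
(rt - crt) < tolerance test and the alphat formula mirror the commit.

#include <cmath>
#include <cstdio>
#include <vector>

int main()
{
  const double tolerance = 1e-20;
  // Hypothetical rt values from successive boosting rounds.
  std::vector<double> rtSequence = { 0.50, 0.60, 0.65, 0.65, 0.70 };

  double crt = 0.0;  // rt from the previous round.
  for (size_t i = 0; i < rtSequence.size(); ++i)
  {
    const double rt = rtSequence[i];
    // Stop once rt no longer improves by at least the tolerance.
    if (i > 0 && (rt - crt) < tolerance)
    {
      std::printf("Stopping early at round %zu.\n", i);
      break;
    }
    crt = rt;
    // Same weight formula as in the commit.
    const double alphat = 0.5 * std::log((1 + rt) / (1 - rt));
    std::printf("Round %zu: rt = %f, alphat = %f\n", i, rt, alphat);
  }
  return 0;
}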
diff --git a/src/mlpack/tests/adaboost_test.cpp b/src/mlpack/tests/adaboost_test.cpp
index 880c678..d613e21 100644
--- a/src/mlpack/tests/adaboost_test.cpp
+++ b/src/mlpack/tests/adaboost_test.cpp
@@ -22,7 +22,7 @@ BOOST_AUTO_TEST_SUITE(AdaboostTest);
* It checks whether the Hamming loss breaches the upper bound, which
* is provided by ztAccumulator.
*/
-BOOST_AUTO_TEST_CASE(HammingLossBound)
+BOOST_AUTO_TEST_CASE(HammingLossBoundIris)
{
arma::mat inputData;
@@ -38,7 +38,7 @@ BOOST_AUTO_TEST_CASE(HammingLossBound)
// Define your own weak learner, perceptron in this case.
// Run the perceptron for perceptron_iter iterations.
- int perceptron_iter = 4000;
+ int perceptron_iter = 400;
perceptron::Perceptron<> p(inputData, labels.row(0), perceptron_iter);
@@ -54,4 +54,134 @@ BOOST_AUTO_TEST_CASE(HammingLossBound)
BOOST_REQUIRE(hammingLoss <= a.ztAccumulator);
}
+/**
+ * This test case runs the AdaBoost.MH algorithm on the UCI Iris dataset.
+ * It checks that the error of the boosted weak learner is no worse than
+ * the error of a single instance of the weak learner run on its own.
+ */
+BOOST_AUTO_TEST_CASE(WeakLearnerErrorIris)
+{
+ arma::mat inputData;
+
+ if (!data::Load("iris.txt", inputData))
+ BOOST_FAIL("Cannot load test dataset iris.txt!");
+
+ arma::Mat<size_t> labels;
+
+ if (!data::Load("iris_labels.txt",labels))
+ BOOST_FAIL("Cannot load labels for iris iris_labels.txt");
+
+ // no need to map the labels here
+
+ // Define your own weak learner, perceptron in this case.
+ // Run the perceptron for perceptron_iter iterations.
+ int perceptron_iter = 400;
+
+ arma::Row<size_t> perceptronPrediction(labels.n_cols);
+ perceptron::Perceptron<> p(inputData, labels.row(0), perceptron_iter);
+ p.Classify(inputData, perceptronPrediction);
+
+ int countWeakLearnerError = 0;
+ for (size_t i = 0; i < labels.n_cols; i++)
+ if (labels(i) != perceptronPrediction(i))
+ countWeakLearnerError++;
+ double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols;
+
+ // Define parameters for AdaBoost.
+ int iterations = 100;
+ Adaboost<> a(inputData, labels.row(0), iterations, p);
+ int countError = 0;
+ for (size_t i = 0; i < labels.n_cols; i++)
+ if (labels(i) != a.finalHypothesis(i))
+ countError++;
+ double error = (double) countError / labels.n_cols;
+
+ BOOST_REQUIRE(error <= weakLearnerErrorRate);
+}
+
+/**
+ * This test case runs the AdaBoost.MH algorithm on the UCI Vertebral
+ * Column dataset.
+ * It checks whether the Hamming loss breaches the upper bound, which
+ * is provided by ztAccumulator.
+ */
+BOOST_AUTO_TEST_CASE(HammingLossBoundVertebralColumn)
+{
+ arma::mat inputData;
+
+ if (!data::Load("vc2.txt", inputData))
+ BOOST_FAIL("Cannot load test dataset vc2.txt!");
+
+ arma::Mat<size_t> labels;
+
+ if (!data::Load("vc2_labels.txt",labels))
+ BOOST_FAIL("Cannot load labels for vc2_labels.txt");
+
+ // no need to map the labels here
+
+ // Define your own weak learner, perceptron in this case.
+ // Run the perceptron for perceptron_iter iterations.
+ int perceptron_iter = 800;
+
+ perceptron::Perceptron<> p(inputData, labels.row(0), perceptron_iter);
+
+ // Define parameters for AdaBoost.
+ int iterations = 50;
+ Adaboost<> a(inputData, labels.row(0), iterations, p);
+ int countError = 0;
+ for (size_t i = 0; i < labels.n_cols; i++)
+ if (labels(i) != a.finalHypothesis(i))
+ countError++;
+ double hammingLoss = (double) countError / labels.n_cols;
+
+ BOOST_REQUIRE(hammingLoss <= a.ztAccumulator);
+}
+
+/**
+ * This test case runs the AdaBoost.MH algorithm on the UCI Vertebral
+ * Column dataset.
+ * It checks that the error of the boosted weak learner is no worse than
+ * the error of a single instance of the weak learner run on its own.
+ */
+BOOST_AUTO_TEST_CASE(WeakLearnerErrorVertebralColumn)
+{
+ arma::mat inputData;
+
+ if (!data::Load("vc2.txt", inputData))
+ BOOST_FAIL("Cannot load test dataset vc2.txt!");
+
+ arma::Mat<size_t> labels;
+
+ if (!data::Load("vc2_labels.txt",labels))
+ BOOST_FAIL("Cannot load labels for vc2_labels.txt");
+
+ // no need to map the labels here
+
+ // Define your own weak learner, perceptron in this case.
+ // Run the perceptron for perceptron_iter iterations.
+ int perceptron_iter = 800;
+
+ arma::Row<size_t> perceptronPrediction(labels.n_cols);
+ perceptron::Perceptron<> p(inputData, labels.row(0), perceptron_iter);
+ p.Classify(inputData, perceptronPrediction);
+
+ int countWeakLearnerError = 0;
+ for (size_t i = 0; i < labels.n_cols; i++)
+ if (labels(i) != perceptronPrediction(i))
+ countWeakLearnerError++;
+ double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols;
+
+ // Define parameters for AdaBoost.
+ int iterations = 50;
+ Adaboost<> a(inputData, labels.row(0), iterations, p);
+ int countError = 0;
+ for (size_t i = 0; i < labels.n_cols; i++)
+ if (labels(i) != a.finalHypothesis(i))
+ countError++;
+ double error = (double) countError / labels.n_cols;
+
+ BOOST_REQUIRE(error <= weakLearnerErrorRate);
+}
BOOST_AUTO_TEST_SUITE_END();
\ No newline at end of file
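As background for the HammingLossBound tests above: in the AdaBoost.MH
analysis of Schapire and Singer, the training Hamming loss is bounded by
the product of the per-round normalization factors, which this
implementation accumulates in ztAccumulator. A sketch of the relevant
formulas, written in LaTeX with the same quantities the code computes:

  r_t = \sum_{j,k} D_t(j,k) \, y(j,k) \, h_t(j,k),
  \qquad
  \alpha_t = \frac{1}{2} \ln\left(\frac{1 + r_t}{1 - r_t}\right),
  \qquad
  \text{HammingLoss} \le \prod_{t=1}^{T} Z_t .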
diff --git a/src/mlpack/tests/data/vc2.txt b/src/mlpack/tests/data/vc2.txt
new file mode 100644
index 0000000..a181337
--- /dev/null
+++ b/src/mlpack/tests/data/vc2.txt
@@ -0,0 +1,310 @@
+63.03 22.55 39.61 40.48 98.67 -0.25
+39.06 10.06 25.02 29 114.41 4.56
+68.83 22.22 50.09 46.61 105.99 -3.53
+69.3 24.65 44.31 44.64 101.87 11.21
+49.71 9.65 28.32 40.06 108.17 7.92
+40.25 13.92 25.12 26.33 130.33 2.23
+53.43 15.86 37.17 37.57 120.57 5.99
+45.37 10.76 29.04 34.61 117.27 -10.68
+43.79 13.53 42.69 30.26 125 13.29
+36.69 5.01 41.95 31.68 84.24 0.66
+49.71 13.04 31.33 36.67 108.65 -7.83
+31.23 17.72 15.5 13.52 120.06 0.5
+48.92 19.96 40.26 28.95 119.32 8.03
+53.57 20.46 33.1 33.11 110.97 7.04
+57.3 24.19 47 33.11 116.81 5.77
+44.32 12.54 36.1 31.78 124.12 5.42
+63.83 20.36 54.55 43.47 112.31 -0.62
+31.28 3.14 32.56 28.13 129.01 3.62
+38.7 13.44 31 25.25 123.16 1.43
+41.73 12.25 30.12 29.48 116.59 -1.24
+43.92 14.18 37.83 29.74 134.46 6.45
+54.92 21.06 42.2 33.86 125.21 2.43
+63.07 24.41 54 38.66 106.42 15.78
+45.54 13.07 30.3 32.47 117.98 -4.99
+36.13 22.76 29 13.37 115.58 -3.24
+54.12 26.65 35.33 27.47 121.45 1.57
+26.15 10.76 14 15.39 125.2 -10.09
+43.58 16.51 47 27.07 109.27 8.99
+44.55 21.93 26.79 22.62 111.07 2.65
+66.88 24.89 49.28 41.99 113.48 -2.01
+50.82 15.4 42.53 35.42 112.19 10.87
+46.39 11.08 32.14 35.31 98.77 6.39
+44.94 17.44 27.78 27.49 117.98 5.57
+38.66 12.99 40 25.68 124.91 2.7
+59.6 32 46.56 27.6 119.33 1.47
+31.48 7.83 24.28 23.66 113.83 4.39
+32.09 6.99 36 25.1 132.26 6.41
+35.7 19.44 20.7 16.26 137.54 -0.26
+55.84 28.85 47.69 27 123.31 2.81
+52.42 19.01 35.87 33.41 116.56 1.69
+35.49 11.7 15.59 23.79 106.94 -3.46
+46.44 8.4 29.04 38.05 115.48 2.05
+53.85 19.23 32.78 34.62 121.67 5.33
+66.29 26.33 47.5 39.96 121.22 -0.8
+56.03 16.3 62.28 39.73 114.02 -2.33
+50.91 23.02 47 27.9 117.42 -2.53
+48.33 22.23 36.18 26.1 117.38 6.48
+41.35 16.58 30.71 24.78 113.27 -4.5
+40.56 17.98 34 22.58 121.05 -1.54
+41.77 17.9 20.03 23.87 118.36 2.06
+55.29 20.44 34 34.85 115.88 3.56
+74.43 41.56 27.7 32.88 107.95 5
+50.21 29.76 36.1 20.45 128.29 5.74
+30.15 11.92 34 18.23 112.68 11.46
+41.17 17.32 33.47 23.85 116.38 -9.57
+47.66 13.28 36.68 34.38 98.25 6.27
+43.35 7.47 28.07 35.88 112.78 5.75
+46.86 15.35 38 31.5 116.25 1.66
+43.2 19.66 35 23.54 124.85 -2.92
+48.11 14.93 35.56 33.18 124.06 7.95
+74.38 32.05 78.77 42.32 143.56 56.13
+89.68 32.7 83.13 56.98 129.96 92.03
+44.53 9.43 52 35.1 134.71 29.11
+77.69 21.38 64.43 56.31 114.82 26.93
+76.15 21.94 82.96 54.21 123.93 10.43
+83.93 41.29 62 42.65 115.01 26.59
+78.49 22.18 60 56.31 118.53 27.38
+75.65 19.34 64.15 56.31 95.9 69.55
+72.08 18.95 51 53.13 114.21 1.01
+58.6 -0.26 51.5 58.86 102.04 28.06
+72.56 17.39 52 55.18 119.19 32.11
+86.9 32.93 47.79 53.97 135.08 101.72
+84.97 33.02 60.86 51.95 125.66 74.33
+55.51 20.1 44 35.42 122.65 34.55
+72.22 23.08 91 49.14 137.74 56.8
+70.22 39.82 68.12 30.4 148.53 145.38
+86.75 36.04 69.22 50.71 139.41 110.86
+58.78 7.67 53.34 51.12 98.5 51.58
+67.41 17.44 60.14 49.97 111.12 33.16
+47.74 12.09 39 35.66 117.51 21.68
+77.11 30.47 69.48 46.64 112.15 70.76
+74.01 21.12 57.38 52.88 120.21 74.56
+88.62 29.09 47.56 59.53 121.76 51.81
+81.1 24.79 77.89 56.31 151.84 65.21
+76.33 42.4 57.2 33.93 124.27 50.13
+45.44 9.91 45 35.54 163.07 20.32
+59.79 17.88 59.21 41.91 119.32 22.12
+44.91 10.22 44.63 34.7 130.08 37.36
+56.61 16.8 42 39.81 127.29 24.02
+71.19 23.9 43.7 47.29 119.86 27.28
+81.66 28.75 58.23 52.91 114.77 30.61
+70.95 20.16 62.86 50.79 116.18 32.52
+85.35 15.84 71.67 69.51 124.42 76.02
+58.1 14.84 79.65 43.26 113.59 50.24
+94.17 15.38 67.71 78.79 114.89 53.26
+57.52 33.65 50.91 23.88 140.98 148.75
+96.66 19.46 90.21 77.2 120.67 64.08
+74.72 19.76 82.74 54.96 109.36 33.31
+77.66 22.43 93.89 55.22 123.06 61.21
+58.52 13.92 41.47 44.6 115.51 30.39
+84.59 30.36 65.48 54.22 108.01 25.12
+79.94 18.77 63.31 61.16 114.79 38.54
+70.4 13.47 61.2 56.93 102.34 25.54
+49.78 6.47 53 43.32 110.86 25.34
+77.41 29.4 63.23 48.01 118.45 93.56
+65.01 27.6 50.95 37.41 116.58 7.02
+65.01 9.84 57.74 55.18 94.74 49.7
+78.43 33.43 76.28 45 138.55 77.16
+63.17 6.33 63 56.84 110.64 42.61
+68.61 15.08 63.01 53.53 123.43 39.5
+63.9 13.71 62.12 50.19 114.13 41.42
+85 29.61 83.35 55.39 126.91 71.32
+42.02 -6.55 67.9 48.58 111.59 27.34
+69.76 19.28 48.5 50.48 96.49 51.17
+80.99 36.84 86.96 44.14 141.09 85.87
+129.83 8.4 48.38 121.43 107.69 418.54
+70.48 12.49 62.42 57.99 114.19 56.9
+86.04 38.75 47.87 47.29 122.09 61.99
+65.54 24.16 45.78 41.38 136.44 16.38
+60.75 15.75 43.2 45 113.05 31.69
+54.74 12.1 41 42.65 117.64 40.38
+83.88 23.08 87.14 60.8 124.65 80.56
+80.07 48.07 52.4 32.01 110.71 67.73
+65.67 10.54 56.49 55.12 109.16 53.93
+74.72 14.32 32.5 60.4 107.18 37.02
+48.06 5.69 57.06 42.37 95.44 32.84
+70.68 21.7 59.18 48.97 103.01 27.81
+80.43 17 66.54 63.43 116.44 57.78
+90.51 28.27 69.81 62.24 100.89 58.82
+77.24 16.74 49.78 60.5 110.69 39.79
+50.07 9.12 32.17 40.95 99.71 26.77
+69.78 13.78 58 56 118.93 17.91
+69.63 21.12 52.77 48.5 116.8 54.82
+81.75 20.12 70.56 61.63 119.43 55.51
+52.2 17.21 78.09 34.99 136.97 54.94
+77.12 30.35 77.48 46.77 110.61 82.09
+88.02 39.84 81.77 48.18 116.6 56.77
+83.4 34.31 78.42 49.09 110.47 49.67
+72.05 24.7 79.87 47.35 107.17 56.43
+85.1 21.07 91.73 64.03 109.06 38.03
+69.56 15.4 74.44 54.16 105.07 29.7
+89.5 48.9 72 40.6 134.63 118.35
+85.29 18.28 100.74 67.01 110.66 58.88
+60.63 20.6 64.54 40.03 117.23 104.86
+60.04 14.31 58.04 45.73 105.13 30.41
+85.64 42.69 78.75 42.95 105.14 42.89
+85.58 30.46 78.23 55.12 114.87 68.38
+55.08 -3.76 56 58.84 109.92 31.77
+65.76 9.83 50.82 55.92 104.39 39.31
+79.25 23.94 40.8 55.3 98.62 36.71
+81.11 20.69 60.69 60.42 94.02 40.51
+48.03 3.97 58.34 44.06 125.35 35
+63.4 14.12 48.14 49.29 111.92 31.78
+57.29 15.15 64 42.14 116.74 30.34
+41.19 5.79 42.87 35.39 103.35 27.66
+66.8 14.55 72.08 52.25 82.46 41.69
+79.48 26.73 70.65 52.74 118.59 61.7
+44.22 1.51 46.11 42.71 108.63 42.81
+57.04 0.35 49.2 56.69 103.05 52.17
+64.27 12.51 68.7 51.77 95.25 39.41
+92.03 35.39 77.42 56.63 115.72 58.06
+67.26 7.19 51.7 60.07 97.8 42.14
+118.14 38.45 50.84 79.7 81.02 74.04
+115.92 37.52 76.8 78.41 104.7 81.2
+53.94 9.31 43.1 44.64 124.4 25.08
+83.7 20.27 77.11 63.43 125.48 69.28
+56.99 6.87 57.01 50.12 109.98 36.81
+72.34 16.42 59.87 55.92 70.08 12.07
+95.38 24.82 95.16 70.56 89.31 57.66
+44.25 1.1 38 43.15 98.27 23.91
+64.81 15.17 58.84 49.64 111.68 21.41
+78.4 14.04 79.69 64.36 104.73 12.39
+56.67 13.46 43.77 43.21 93.69 21.11
+50.83 9.06 56.3 41.76 79 23.04
+61.41 25.38 39.1 36.03 103.4 21.84
+56.56 8.96 52.58 47.6 98.78 50.7
+67.03 13.28 66.15 53.75 100.72 33.99
+80.82 19.24 61.64 61.58 89.47 44.17
+80.65 26.34 60.9 54.31 120.1 52.47
+68.72 49.43 68.06 19.29 125.02 54.69
+37.9 4.48 24.71 33.42 157.85 33.61
+64.62 15.23 67.63 49.4 90.3 31.33
+75.44 31.54 89.6 43.9 106.83 54.97
+71 37.52 84.54 33.49 125.16 67.77
+81.06 20.8 91.78 60.26 125.43 38.18
+91.47 24.51 84.62 66.96 117.31 52.62
+81.08 21.26 78.77 59.83 90.07 49.16
+60.42 5.27 59.81 55.15 109.03 30.27
+85.68 38.65 82.68 47.03 120.84 61.96
+82.41 29.28 77.05 53.13 117.04 62.77
+43.72 9.81 52 33.91 88.43 40.88
+86.47 40.3 61.14 46.17 97.4 55.75
+74.47 33.28 66.94 41.19 146.47 124.98
+70.25 10.34 76.37 59.91 119.24 32.67
+72.64 18.93 68 53.71 116.96 25.38
+71.24 5.27 86 65.97 110.7 38.26
+63.77 12.76 65.36 51.01 89.82 56
+58.83 37.58 125.74 21.25 135.63 117.31
+74.85 13.91 62.69 60.95 115.21 33.17
+75.3 16.67 61.3 58.63 118.88 31.58
+63.36 20.02 67.5 43.34 131 37.56
+67.51 33.28 96.28 34.24 145.6 88.3
+76.31 41.93 93.28 34.38 132.27 101.22
+73.64 9.71 63 63.92 98.73 26.98
+56.54 14.38 44.99 42.16 101.72 25.77
+80.11 33.94 85.1 46.17 125.59 100.29
+95.48 46.55 59 48.93 96.68 77.28
+74.09 18.82 76.03 55.27 128.41 73.39
+87.68 20.37 93.82 67.31 120.94 76.73
+48.26 16.42 36.33 31.84 94.88 28.34
+38.51 16.96 35.11 21.54 127.63 7.99
+54.92 18.97 51.6 35.95 125.85 2
+44.36 8.95 46.9 35.42 129.22 4.99
+48.32 17.45 48 30.87 128.98 -0.91
+45.7 10.66 42.58 35.04 130.18 -3.39
+30.74 13.35 35.9 17.39 142.41 -2.01
+50.91 6.68 30.9 44.24 118.15 -1.06
+38.13 6.56 50.45 31.57 132.11 6.34
+51.62 15.97 35 35.66 129.39 1.01
+64.31 26.33 50.96 37.98 106.18 3.12
+44.49 21.79 31.47 22.7 113.78 -0.28
+54.95 5.87 53 49.09 126.97 -0.63
+56.1 13.11 62.64 43 116.23 31.17
+69.4 18.9 75.97 50.5 103.58 -0.44
+89.83 22.64 90.56 67.2 100.5 3.04
+59.73 7.72 55.34 52 125.17 3.24
+63.96 16.06 63.12 47.9 142.36 6.3
+61.54 19.68 52.89 41.86 118.69 4.82
+38.05 8.3 26.24 29.74 123.8 3.89
+43.44 10.1 36.03 33.34 137.44 -3.11
+65.61 23.14 62.58 42.47 124.13 -4.08
+53.91 12.94 39 40.97 118.19 5.07
+43.12 13.82 40.35 29.3 128.52 0.97
+40.68 9.15 31.02 31.53 139.12 -2.51
+37.73 9.39 42 28.35 135.74 13.68
+63.93 19.97 40.18 43.96 113.07 -11.06
+61.82 13.6 64 48.22 121.78 1.3
+62.14 13.96 58 48.18 133.28 4.96
+69 13.29 55.57 55.71 126.61 10.83
+56.45 19.44 43.58 37 139.19 -1.86
+41.65 8.84 36.03 32.81 116.56 -6.05
+51.53 13.52 35 38.01 126.72 13.93
+39.09 5.54 26.93 33.55 131.58 -0.76
+34.65 7.51 43 27.14 123.99 -4.08
+63.03 27.34 51.61 35.69 114.51 7.44
+47.81 10.69 54 37.12 125.39 -0.4
+46.64 15.85 40 30.78 119.38 9.06
+49.83 16.74 28 33.09 121.44 1.91
+47.32 8.57 35.56 38.75 120.58 1.63
+50.75 20.24 37 30.52 122.34 2.29
+36.16 -0.81 33.63 36.97 135.94 -2.09
+40.75 1.84 50 38.91 139.25 0.67
+42.92 -5.85 58 48.76 121.61 -3.36
+63.79 21.35 66 42.45 119.55 12.38
+72.96 19.58 61.01 53.38 111.23 0.81
+67.54 14.66 58 52.88 123.63 25.97
+54.75 9.75 48 45 123.04 8.24
+50.16 -2.97 42 53.13 131.8 -8.29
+40.35 10.19 37.97 30.15 128.01 0.46
+63.62 16.93 49.35 46.68 117.09 -0.36
+54.14 11.94 43 42.21 122.21 0.15
+74.98 14.92 53.73 60.05 105.65 1.59
+42.52 14.38 25.32 28.14 128.91 0.76
+33.79 3.68 25.5 30.11 128.33 -1.78
+54.5 6.82 47 47.68 111.79 -4.41
+48.17 9.59 39.71 38.58 135.62 5.36
+46.37 10.22 42.7 36.16 121.25 -0.54
+52.86 9.41 46.99 43.45 123.09 1.86
+57.15 16.49 42.84 40.66 113.81 5.02
+37.14 16.48 24 20.66 125.01 7.37
+51.31 8.88 57 42.44 126.47 -2.14
+42.52 16.54 42 25.97 120.63 7.88
+39.36 7.01 37 32.35 117.82 1.9
+35.88 1.11 43.46 34.77 126.92 -1.63
+43.19 9.98 28.94 33.22 123.47 1.74
+67.29 16.72 51 50.57 137.59 4.96
+51.33 13.63 33.26 37.69 131.31 1.79
+65.76 13.21 44 52.55 129.39 -1.98
+40.41 -1.33 30.98 41.74 119.34 -6.17
+48.8 18.02 52 30.78 139.15 10.44
+50.09 13.43 34.46 36.66 119.13 3.09
+64.26 14.5 43.9 49.76 115.39 5.95
+53.68 13.45 41.58 40.24 113.91 2.74
+49 13.11 51.87 35.88 126.4 0.54
+59.17 14.56 43.2 44.6 121.04 2.83
+67.8 16.55 43.26 51.25 119.69 4.87
+61.73 17.11 46.9 44.62 120.92 3.09
+33.04 -0.32 19.07 33.37 120.39 9.35
+74.57 15.72 58.62 58.84 105.42 0.6
+44.43 14.17 32.24 30.26 131.72 -3.6
+36.42 13.88 20.24 22.54 126.08 0.18
+51.08 14.21 35.95 36.87 115.8 6.91
+34.76 2.63 29.5 32.12 127.14 -0.46
+48.9 5.59 55.5 43.32 137.11 19.85
+46.24 10.06 37 36.17 128.06 -5.1
+46.43 6.62 48.1 39.81 130.35 2.45
+39.66 16.21 36.67 23.45 131.92 -4.97
+45.58 18.76 33.77 26.82 116.8 3.13
+66.51 20.9 31.73 45.61 128.9 1.52
+82.91 29.89 58.25 53.01 110.71 6.08
+50.68 6.46 35 44.22 116.59 -0.21
+89.01 26.08 69.02 62.94 111.48 6.06
+54.6 21.49 29.36 33.11 118.34 -1.47
+34.38 2.06 32.39 32.32 128.3 -3.37
+45.08 12.31 44.58 32.77 147.89 -8.94
+47.9 13.62 36 34.29 117.45 -4.25
+53.94 20.72 29.22 33.22 114.37 -0.42
+61.45 22.69 46.17 38.75 125.67 -2.71
+45.25 8.69 41.58 36.56 118.55 0.21
+33.84 5.07 36.64 28.77 123.95 -0.2
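A note on the format of the two new data files: vc2.txt stores one point
per line with six features, and vc2_labels.txt one label per line.
mlpack's data::Load() transposes on load, so points end up as columns.
A minimal sketch of loading the files and checking the expected shapes
(assuming the files sit in the working directory; the shape checks are
illustrative, not part of the commit):

#include <mlpack/core.hpp>
#include <iostream>

using namespace mlpack;

int main()
{
  // The 310-line, 6-column text file loads as a 6 x 310 matrix:
  // one column per point, as mlpack expects.
  arma::mat inputData;
  if (!data::Load("vc2.txt", inputData))
    return 1;
  std::cout << inputData.n_rows << " x " << inputData.n_cols << std::endl;

  // The labels load as a single row of 310 entries in {0, 1, 2},
  // which is why the tests index them with labels.row(0).
  arma::Mat<size_t> labels;
  if (!data::Load("vc2_labels.txt", labels))
    return 1;
  std::cout << labels.n_rows << " x " << labels.n_cols << std::endl;

  return 0;
}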
diff --git a/src/mlpack/tests/data/vc2_labels.txt b/src/mlpack/tests/data/vc2_labels.txt
new file mode 100644
index 0000000..7601f70
--- /dev/null
+++ b/src/mlpack/tests/data/vc2_labels.txt
@@ -0,0 +1,310 @@
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/mlpack.git