[mlpack] 25/44: Backport r17432:17437.
Barak A. Pearlmutter
barak+git at pearlmutter.net
Mon Feb 15 19:35:54 UTC 2016
This is an automated email from the git hooks/post-receive script.
bap pushed a commit to tag mlpack-1.0.11
in repository mlpack.
commit bfa6fa9f6de63f66eff86fa50e160293e635b57a
Author: Ryan Curtin <ryan at ratml.org>
Date: Sun Dec 7 19:47:07 2014 +0000
Backport r17432:17437.
---
HISTORY.txt | 3 +
.../methods/sparse_coding/sparse_coding_impl.hpp | 4 +-
src/mlpack/tests/cosine_tree_test.cpp | 72 ++++++++++------------
src/mlpack/tests/logistic_regression_test.cpp | 7 ++-
src/mlpack/tests/sa_test.cpp | 2 +-
src/mlpack/tests/svd_batch_test.cpp | 6 +-
6 files changed, 47 insertions(+), 47 deletions(-)
diff --git a/HISTORY.txt b/HISTORY.txt
index d0ce8c4..a906829 100644
--- a/HISTORY.txt
+++ b/HISTORY.txt
@@ -18,6 +18,9 @@
* math::RandomSeed() now sets the random seed for recent (>=3.930) Armadillo
versions.
+ * Handle Newton method convergence better for
+ SparseCoding::OptimizeDictionary() and make maximum iterations a parameter.
+
2014-08-29 mlpack 1.0.10
* Bugfix for NeighborSearch regression which caused very slow allknn/allkfn.
diff --git a/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp b/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp
index d62e13a..2ffc32d 100644
--- a/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp
+++ b/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp
@@ -270,7 +270,7 @@ double SparseCoding<DictionaryInitializer>::OptimizeDictionary(
<< "." << std::endl;
Log::Debug << " Improvement: " << std::scientific << improvement << ".\n";
- if (improvement < newtonTolerance)
+ if (normGradient < newtonTolerance)
converged = true;
}
@@ -307,7 +307,7 @@ double SparseCoding<DictionaryInitializer>::OptimizeDictionary(
}
}
}
- //printf("final reconstruction error: %e\n", norm(data - dictionary * codes, "fro"));
+
return normGradient;
}
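
For context on the change above: OptimizeDictionary() now declares
convergence when the gradient norm falls below newtonTolerance, rather
than when the per-step improvement does, and (per the HISTORY entry)
the maximum iteration count becomes a parameter. A minimal standalone
sketch of that stopping rule on a toy one-dimensional objective, not
mlpack's dictionary update; the zero-means-unlimited convention for
maxIterations is an assumption borrowed from mlpack's usual style:

    #include <cmath>
    #include <cstddef>
    #include <cstdio>

    // Newton iteration on f(x) = (x - 2)^2 + 1: gradient 2(x - 2),
    // constant Hessian 2. Convergence is declared on the gradient
    // norm, mirroring the hunk above.
    double NewtonMinimize(double x, const double newtonTolerance,
                          const std::size_t maxIterations)
    {
      for (std::size_t i = 0; maxIterations == 0 || i < maxIterations; ++i)
      {
        const double gradient = 2.0 * (x - 2.0);
        if (std::fabs(gradient) < newtonTolerance)
          break; // converged: gradient norm below tolerance
        x -= gradient / 2.0; // Newton step: gradient / Hessian
      }
      return x;
    }

    int main()
    {
      std::printf("minimum near x = %f\n", NewtonMinimize(10.0, 1e-6, 50));
      return 0;
    }
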
diff --git a/src/mlpack/tests/cosine_tree_test.cpp b/src/mlpack/tests/cosine_tree_test.cpp
index 1e410f1..d4edd14 100644
--- a/src/mlpack/tests/cosine_tree_test.cpp
+++ b/src/mlpack/tests/cosine_tree_test.cpp
@@ -45,13 +45,13 @@ BOOST_AUTO_TEST_CASE(CosineTreeNoSplit)
// Make a random dataset.
arma::mat data = arma::randu(numRows, numCols);
-
+
// Make a cosine tree, with the generated dataset and the defined constants.
// Note that the value of epsilon is one.
CosineTree ctree(data, epsilon, delta);
arma::mat basis;
ctree.GetFinalBasis(basis);
-
+
// Since epsilon is one, there should be no splitting and the only vector in
// the basis should come from the root node.
BOOST_REQUIRE_EQUAL(basis.n_cols, 1);
@@ -66,74 +66,68 @@ BOOST_AUTO_TEST_CASE(CosineNodeCosineSplit)
// Initialize constants required for the test.
const size_t numRows = 500;
const size_t numCols = 1000;
-
+
// Make a random dataset and the root object.
arma::mat data = arma::randu(numRows, numCols);
CosineTree root(data);
-
+
// Stack for depth first search of the tree.
std::vector<CosineTree*> nodeStack;
nodeStack.push_back(&root);
-
+
// While stack is not empty.
- while(nodeStack.size())
+ while (nodeStack.size())
{
// Pop a node from the stack and split it.
CosineTree *currentNode, *currentLeft, *currentRight;
currentNode = nodeStack.back();
currentNode->CosineNodeSplit();
nodeStack.pop_back();
-
+
// Obtain pointers to the children of the node.
currentLeft = currentNode->Left();
currentRight = currentNode->Right();
-
+
// If children exist.
- if(currentLeft && currentRight)
+ if (currentLeft && currentRight)
{
// Push the child nodes on to the stack.
nodeStack.push_back(currentLeft);
nodeStack.push_back(currentRight);
-
+
// Obtain the split point of the popped node.
arma::vec splitPoint = data.col(currentNode->SplitPointIndex());
-
+
// Column indices of the child nodes.
std::vector<size_t> leftIndices, rightIndices;
leftIndices = currentLeft->VectorIndices();
rightIndices = currentRight->VectorIndices();
-
+
// The columns in the popped node should be split into left and right nodes.
BOOST_REQUIRE_EQUAL(currentNode->NumColumns(), leftIndices.size() +
rightIndices.size());
-
+
// Calculate the cosine values for each of the columns in the node.
arma::vec cosines;
cosines.zeros(currentNode->NumColumns());
-
+
size_t i, j, k;
- for(i = 0; i < leftIndices.size(); i++)
- {
+ for (i = 0; i < leftIndices.size(); i++)
cosines(i) = arma::norm_dot(data.col(leftIndices[i]), splitPoint);
- }
- for(j = 0, k = i; j < rightIndices.size(); j++, k++)
- {
+
+ for (j = 0, k = i; j < rightIndices.size(); j++, k++)
cosines(k) = arma::norm_dot(data.col(rightIndices[j]), splitPoint);
- }
-
+
// Check if the columns assigned to the children agree with the splitting
// condition.
double cosineMax = arma::max(cosines % (cosines < 1));
double cosineMin = arma::min(cosines);
-
- for(i = 0; i < leftIndices.size(); i++)
- {
+
+ for (i = 0; i < leftIndices.size(); i++)
BOOST_CHECK_LT(cosineMax - cosines(i), cosines(i) - cosineMin);
- }
- for(j = 0, k = i; j < rightIndices.size(); j++, k++)
- {
+
+ for (j = 0, k = i; j < rightIndices.size(); j++, k++)
BOOST_CHECK_GT(cosineMax - cosines(k), cosines(k) - cosineMin);
- }
}
}
}
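
The loop above checks the splitting invariant: among a node's columns,
those whose cosine similarity to the split point is nearer the maximum
go to the left child, and those nearer the minimum go to the right.
The criterion can be illustrated with plain Armadillo, independent of
mlpack's tree classes; everything below is local to the sketch:

    #include <armadillo>
    #include <iostream>

    int main()
    {
      arma::mat data = arma::randu<arma::mat>(50, 100);
      const arma::vec splitPoint = data.col(0);

      // Cosine similarity of every column with the split point.
      arma::vec cosines(data.n_cols);
      for (size_t i = 0; i < data.n_cols; ++i)
        cosines(i) = arma::norm_dot(data.col(i), splitPoint);

      // Exclude the split point itself (cosine exactly 1) from the max,
      // as the test does with (cosines % (cosines < 1)).
      const arma::vec mask = arma::conv_to<arma::vec>::from(cosines < 1.0);
      const double cosineMax = arma::max(cosines % mask);
      const double cosineMin = arma::min(cosines);

      // Column i belongs left when it is nearer cosineMax than cosineMin.
      size_t leftCount = 0;
      for (size_t i = 1; i < data.n_cols; ++i)
        if (cosineMax - cosines(i) < cosines(i) - cosineMin)
          ++leftCount;

      std::cout << leftCount << " of " << (data.n_cols - 1)
                << " columns would go left" << std::endl;
      return 0;
    }
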
@@ -149,51 +143,51 @@ BOOST_AUTO_TEST_CASE(CosineTreeModifiedGramSchmidt)
const size_t numCols = 50;
const double epsilon = 1;
const double delta = 0.1;
-
+
// Make a random dataset.
arma::mat data = arma::randu(numRows, numCols);
-
+
// Declare a queue and a dummy CosineTree object.
CosineNodeQueue basisQueue;
CosineTree dummyTree(data, epsilon, delta);
-
+
for(size_t i = 0; i < numCols; i++)
{
// Make a new CosineNode object.
CosineTree* basisNode;
basisNode = new CosineTree(data);
-
+
// Use the columns of the dataset as random centroids.
arma::vec centroid = data.col(i);
arma::vec newBasisVector;
-
+
// Obtain the orthonormalized version of the centroid.
- dummyTree.ModifiedGramSchmidt(basisQueue, centroid, newBasisVector);
-
+ dummyTree.ModifiedGramSchmidt(basisQueue, centroid, newBasisVector);
+
// Check if the obtained vector is orthonormal to the basis vectors.
CosineNodeQueue::const_iterator j = basisQueue.begin();
CosineTree* currentNode;
-
+
for(; j != basisQueue.end(); j++)
{
currentNode = *j;
BOOST_REQUIRE_SMALL(arma::dot(currentNode->BasisVector(), newBasisVector),
1e-5);
}
-
+
// Add the obtained vector to the basis.
basisNode->BasisVector(newBasisVector);
basisNode->L2Error(arma::randu());
basisQueue.push(basisNode);
}
-
+
// Deallocate memory given to the objects.
for(size_t i = 0; i < numCols; i++)
{
CosineTree* currentNode;
currentNode = basisQueue.top();
basisQueue.pop();
-
+
delete currentNode;
}
}
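
The orthogonality asserted above is the defining property of modified
Gram-Schmidt: each projection onto the current basis is subtracted
sequentially before the residual is normalized. A standalone sketch of
the procedure in plain Armadillo; mlpack's ModifiedGramSchmidt() walks
a CosineNodeQueue instead, so treat this as an illustration, not its
implementation:

    #include <armadillo>
    #include <iostream>
    #include <vector>

    // Orthonormalize v against an existing orthonormal basis with
    // modified Gram-Schmidt: subtract each projection in sequence,
    // then normalize whatever residual remains.
    arma::vec OrthonormalResidual(const std::vector<arma::vec>& basis,
                                  arma::vec v)
    {
      for (const arma::vec& b : basis)
        v -= arma::dot(b, v) * b;
      const double norm = arma::norm(v, 2);
      if (norm > 0.0)
        v /= norm;
      return v;
    }

    int main()
    {
      const arma::mat data = arma::randu<arma::mat>(100, 10);
      std::vector<arma::vec> basis;
      for (size_t i = 0; i < data.n_cols; ++i)
        basis.push_back(OrthonormalResidual(basis, data.col(i)));

      // As in the test: pairwise dot products should vanish (to ~1e-5).
      std::cout << "dot(b0, b1) = " << arma::dot(basis[0], basis[1])
                << std::endl;
      return 0;
    }
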
diff --git a/src/mlpack/tests/logistic_regression_test.cpp b/src/mlpack/tests/logistic_regression_test.cpp
index 1039150..fa1f3ef 100644
--- a/src/mlpack/tests/logistic_regression_test.cpp
+++ b/src/mlpack/tests/logistic_regression_test.cpp
@@ -556,8 +556,11 @@ BOOST_AUTO_TEST_CASE(LogisticRegressionSGDRegularizationSimpleTest)
"1 2 3");
arma::vec responses("1 1 0");
- // Create a logistic regression object using SGD.
- LogisticRegression<SGD> lr(data, responses, 0.001);
+ // Create a logistic regression object using custom SGD with a much smaller
+ // tolerance.
+ LogisticRegressionFunction lrf(data, responses, 0.001);
+ SGD<LogisticRegressionFunction> sgd(lrf, 0.005, 500000, 1e-10);
+ LogisticRegression<SGD> lr(sgd);
// Test sigmoid function.
arma::vec sigmoids = 1 / (1 + arma::exp(-lr.Parameters()[0]
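
The replacement above moves the test from the convenience constructor
to an explicitly configured optimizer, so SGD's tolerance can be
tightened to 1e-10. A usage sketch of that pattern against the
1.0.x-era API: the constructor signatures are taken from the hunk
itself, but the header paths and the two-row toy matrix (only its last
row is visible above) are assumptions to be checked against the
installed headers:

    #include <mlpack/core.hpp>
    #include <mlpack/core/optimizers/sgd/sgd.hpp>
    #include <mlpack/methods/logistic_regression/logistic_regression.hpp>

    using namespace mlpack::regression;
    using namespace mlpack::optimization;

    int main()
    {
      arma::mat data("1 2 3; 1 2 3");
      arma::vec responses("1 1 0");

      // Wrap the objective, then hand a fully configured optimizer to
      // LogisticRegression: step size 0.005, at most 500000 iterations,
      // tolerance 1e-10 (the values used in the hunk above).
      LogisticRegressionFunction lrf(data, responses, 0.001 /* lambda */);
      SGD<LogisticRegressionFunction> sgd(lrf, 0.005, 500000, 1e-10);
      LogisticRegression<SGD> lr(sgd);

      return 0;
    }
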
diff --git a/src/mlpack/tests/sa_test.cpp b/src/mlpack/tests/sa_test.cpp
index e73ff8b..8b090fa 100644
--- a/src/mlpack/tests/sa_test.cpp
+++ b/src/mlpack/tests/sa_test.cpp
@@ -58,7 +58,7 @@ BOOST_AUTO_TEST_CASE(GeneralizedRosenbrockTest)
result = sa.Optimize(coordinates);
++iteration;
- BOOST_REQUIRE_LT(iteration, 3); // No more than three tries.
+ BOOST_REQUIRE_LT(iteration, 4); // No more than three tries.
}
// 0.1% tolerance for each coordinate.
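
The sa_test change is a one-character relaxation of a retry budget:
simulated annealing is stochastic, so the test re-runs the optimization
and only aborts once the counter reaches the bound. The pattern in
isolation, with a placeholder random trial standing in for
SA::Optimize() (the exact loop placement in sa_test.cpp is an
assumption):

    #include <cassert>
    #include <cstddef>
    #include <cstdlib>

    // Placeholder for a stochastic optimization that sometimes misses
    // its target; succeeds roughly one time in three.
    bool RandomTrial() { return (std::rand() % 3) == 0; }

    int main()
    {
      std::srand(42);
      std::size_t iteration = 0;
      bool converged = RandomTrial();
      while (!converged)
      {
        converged = RandomTrial();
        ++iteration;
        assert(iteration < 4); // the retry budget, as in the hunk above
      }
      return 0;
    }
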
diff --git a/src/mlpack/tests/svd_batch_test.cpp b/src/mlpack/tests/svd_batch_test.cpp
index 1e02eea..088047a 100644
--- a/src/mlpack/tests/svd_batch_test.cpp
+++ b/src/mlpack/tests/svd_batch_test.cpp
@@ -174,14 +174,14 @@ BOOST_AUTO_TEST_CASE(SVDBatchNegativeElementTest)
RandomInitialization,
SVDBatchLearning> amf(SimpleToleranceTermination<mat>(),
RandomInitialization(),
- SVDBatchLearning(0.3, 0.001, 0.001, 0));
+ SVDBatchLearning(0.1, 0.001, 0.001, 0));
mat m1, m2;
amf.Apply(test, 3, m1, m2);
arma::mat result = m1 * m2;
- // 2% tolerance on the norm.
- BOOST_REQUIRE_CLOSE(arma::norm(test, "fro"), arma::norm(result, "fro"), 2.0);
+ // 5% tolerance on the norm.
+ BOOST_REQUIRE_CLOSE(arma::norm(test, "fro"), arma::norm(result, "fro"), 5.0);
}
BOOST_AUTO_TEST_SUITE_END();
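
Note that BOOST_REQUIRE_CLOSE takes its tolerance as a percentage, so
the change loosens the reconstruction check from a 2% to a 5% relative
difference between the Frobenius norms, while the smaller
SVDBatchLearning step size (0.1 instead of 0.3) makes the updates more
conservative. An equivalent check without Boost.Test, approximating
Boost's relative-difference definition:

    #include <armadillo>
    #include <algorithm>
    #include <cassert>
    #include <cmath>

    int main()
    {
      const arma::mat test = arma::randu<arma::mat>(20, 20);
      const arma::mat result = test + 0.01 * arma::randu<arma::mat>(20, 20);

      const double a = arma::norm(test, "fro");
      const double b = arma::norm(result, "fro");

      // BOOST_REQUIRE_CLOSE(a, b, 5.0) requires the relative difference
      // to be within 5 percent; mirror that with a symmetric denominator.
      const double relPercent =
          100.0 * std::fabs(a - b) / std::min(std::fabs(a), std::fabs(b));
      assert(relPercent < 5.0); // 5% tolerance on the norm
      return 0;
    }
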
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/mlpack.git