[mlpack] 263/324: Changes to work with new, hierarchical GMMs

Barak A. Pearlmutter barak+git at cs.nuim.ie
Sun Aug 17 08:22:16 UTC 2014


This is an automated email from the git hooks/post-receive script.

bap pushed a commit to branch svn-trunk
in repository mlpack.

commit e711d30b72b38f80dfb02eb9e6872718e35b202c
Author: michaelfox99 <michaelfox99 at 9d5b8971-822b-0410-80eb-d18c1038ef23>
Date:   Tue Aug 5 12:55:06 2014 +0000

    Changes to work with new, hierarchical GMMs
    
    
    git-svn-id: http://svn.cc.gatech.edu/fastlab/mlpack/trunk@16947 9d5b8971-822b-0410-80eb-d18c1038ef23
---
 src/mlpack/methods/gmm/em_fit_impl.hpp | 102 +++++++++++++++------------------
 1 file changed, 47 insertions(+), 55 deletions(-)

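This patch replaces EMFit's parallel means/covariances vectors with a single std::vector of distribution::GaussianDistribution objects. As a rough illustration of the new calling convention only (a minimal sketch: the synthetic data, the EMFit<> default template parameters/constructor, and the GaussianDistribution dimensionality constructor are assumptions, not part of this commit):

    #include <mlpack/core.hpp>
    #include <mlpack/methods/gmm/em_fit.hpp>
    #include <vector>

    using namespace mlpack;

    int main()
    {
      // Synthetic data: 1000 five-dimensional observations, one per column.
      arma::mat observations = arma::randn<arma::mat>(5, 1000);

      // One GaussianDistribution per mixture component replaces the old
      // parallel means/covariances vectors.
      const size_t gaussians = 3;
      std::vector<distribution::GaussianDistribution> dists(gaussians,
          distribution::GaussianDistribution(observations.n_rows));
      arma::vec weights(gaussians);

      // Assumed: EMFit's default template parameters and constructor.
      gmm::EMFit<> fitter;
      fitter.Estimate(observations, dists, weights, false);

      // Component parameters are now read through each distribution.
      const arma::vec& mean0 = dists[0].Mean();
      const arma::mat& cov0 = dists[0].Covariance();

      return 0;
    }
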
diff --git a/src/mlpack/methods/gmm/em_fit_impl.hpp b/src/mlpack/methods/gmm/em_fit_impl.hpp
index d94b016..c1457dd 100644
--- a/src/mlpack/methods/gmm/em_fit_impl.hpp
+++ b/src/mlpack/methods/gmm/em_fit_impl.hpp
@@ -1,6 +1,7 @@
 /**
  * @file em_fit_impl.hpp
  * @author Ryan Curtin
+ * @author Michael Fox
  *
  * Implementation of EM algorithm for fitting GMMs.
  */
@@ -10,9 +11,6 @@
 // In case it hasn't been included yet.
 #include "em_fit.hpp"
 
-// Definition of phi().
-#include "phi.hpp"
-
 namespace mlpack {
 namespace gmm {
 
@@ -32,22 +30,21 @@ EMFit<InitialClusteringType, CovarianceConstraintPolicy>::EMFit(
 template<typename InitialClusteringType, typename CovarianceConstraintPolicy>
 void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
     const arma::mat& observations,
-    std::vector<arma::vec>& means,
-    std::vector<arma::mat>& covariances,
+    std::vector<distribution::GaussianDistribution>& dists,
     arma::vec& weights,
     const bool useInitialModel)
 {
   // Only perform initial clustering if the user wanted it.
   if (!useInitialModel)
-    InitialClustering(observations, means, covariances, weights);
+    InitialClustering(observations, dists, weights);
 
-  double l = LogLikelihood(observations, means, covariances, weights);
+  double l = LogLikelihood(observations, dists, weights);
 
   Log::Debug << "EMFit::Estimate(): initial clustering log-likelihood: "
       << l << std::endl;
 
   double lOld = -DBL_MAX;
-  arma::mat condProb(observations.n_cols, means.size());
+  arma::mat condProb(observations.n_cols, dists.size());
 
   // Iterate to update the model until no more improvement is found.
   size_t iteration = 1;
@@ -58,12 +55,12 @@ void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
 
     // Calculate the conditional probabilities of choosing a particular
     // Gaussian given the observations and the present theta value.
-    for (size_t i = 0; i < means.size(); i++)
+    for (size_t i = 0; i < dists.size(); i++)
     {
       // Store conditional probabilities into condProb vector for each
       // Gaussian.  First we make an alias of the condProb vector.
       arma::vec condProbAlias = condProb.unsafe_col(i);
-      phi(observations, means[i], covariances[i], condProbAlias);
+      dists[i].Probability(observations, condProbAlias);
       condProbAlias *= weights[i];
     }
 
@@ -82,25 +79,25 @@ void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
 
     // Calculate the new value of the means using the updated conditional
     // probabilities.
-    for (size_t i = 0; i < means.size(); i++)
+    for (size_t i = 0; i < dists.size(); i++)
     {
       // Don't update if there's no probability of the Gaussian having points.
       if (probRowSums[i] != 0)
-        means[i] = (observations * condProb.col(i)) / probRowSums[i];
+        dists[i].Mean() = (observations * condProb.col(i)) / probRowSums[i];
 
       // Calculate the new value of the covariances using the updated
       // conditional probabilities and the updated means.
-      arma::mat tmp = observations - (means[i] *
+      arma::mat tmp = observations - (dists[i].Mean() *
           arma::ones<arma::rowvec>(observations.n_cols));
       arma::mat tmpB = tmp % (arma::ones<arma::vec>(observations.n_rows) *
           trans(condProb.col(i)));
 
       // Don't update if there's no probability of the Gaussian having points.
       if (probRowSums[i] != 0.0)
-        covariances[i] = (tmp * trans(tmpB)) / probRowSums[i];
+        dists[i].Covariance() = (tmp * trans(tmpB)) / probRowSums[i];
 
       // Apply covariance constraint.
-      constraint.ApplyConstraint(covariances[i]);
+      constraint.ApplyConstraint(dists[i].Covariance());
     }
 
     // Calculate the new values for omega using the updated conditional
@@ -109,7 +106,7 @@ void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
 
     // Update values of l; calculate new log-likelihood.
     lOld = l;
-    l = LogLikelihood(observations, means, covariances, weights);
+    l = LogLikelihood(observations, dists, weights);
 
     iteration++;
   }
@@ -119,21 +116,20 @@ template<typename InitialClusteringType, typename CovarianceConstraintPolicy>
 void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
     const arma::mat& observations,
     const arma::vec& probabilities,
-    std::vector<arma::vec>& means,
-    std::vector<arma::mat>& covariances,
+    std::vector<distribution::GaussianDistribution>& dists,
     arma::vec& weights,
     const bool useInitialModel)
 {
   if (!useInitialModel)
-    InitialClustering(observations, means, covariances, weights);
+    InitialClustering(observations, dists, weights);
 
-  double l = LogLikelihood(observations, means, covariances, weights);
+  double l = LogLikelihood(observations, dists, weights);
 
   Log::Debug << "EMFit::Estimate(): initial clustering log-likelihood: "
       << l << std::endl;
 
   double lOld = -DBL_MAX;
-  arma::mat condProb(observations.n_cols, means.size());
+  arma::mat condProb(observations.n_cols, dists.size());
 
   // Iterate to update the model until no more improvement is found.
   size_t iteration = 1;
@@ -141,12 +137,12 @@ void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
   {
     // Calculate the conditional probabilities of choosing a particular
     // Gaussian given the observations and the present theta value.
-    for (size_t i = 0; i < means.size(); i++)
+    for (size_t i = 0; i < dists.size(); i++)
     {
       // Store conditional probabilities into condProb vector for each
       // Gaussian.  First we make an alias of the condProb vector.
       arma::vec condProbAlias = condProb.unsafe_col(i);
-      phi(observations, means[i], covariances[i], condProbAlias);
+      dists[i].Probability(observations, condProbAlias);
       condProbAlias *= weights[i];
     }
 
@@ -162,11 +158,11 @@ void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
 
     // This will store the sum of probabilities of each state over all the
     // observations.
-    arma::vec probRowSums(means.size());
+    arma::vec probRowSums(dists.size());
 
     // Calculate the new value of the means using the updated conditional
     // probabilities.
-    for (size_t i = 0; i < means.size(); i++)
+    for (size_t i = 0; i < dists.size(); i++)
     {
       // Calculate the sum of probabilities of points, which is the
       // conditional probability of each point being from Gaussian i
@@ -174,20 +170,20 @@ void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
       // model.
       probRowSums[i] = accu(condProb.col(i) % probabilities);
 
-      means[i] = (observations * (condProb.col(i) % probabilities)) /
+      dists[i].Mean() = (observations * (condProb.col(i) % probabilities)) /
         probRowSums[i];
 
       // Calculate the new value of the covariances using the updated
       // conditional probabilities and the updated means.
-      arma::mat tmp = observations - (means[i] *
+      arma::mat tmp = observations - (dists[i].Mean() *
           arma::ones<arma::rowvec>(observations.n_cols));
       arma::mat tmpB = tmp % (arma::ones<arma::vec>(observations.n_rows) *
           trans(condProb.col(i) % probabilities));
 
-      covariances[i] = (tmp * trans(tmpB)) / probRowSums[i];
+      dists[i].Covariance() = (tmp * trans(tmpB)) / probRowSums[i];
 
       // Apply covariance constraint.
-      constraint.ApplyConstraint(covariances[i]);
+      constraint.ApplyConstraint(dists[i].Covariance());
     }
 
     // Calculate the new values for omega using the updated conditional
@@ -196,7 +192,7 @@ void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
 
     // Update values of l; calculate new log-likelihood.
     lOld = l;
-    l = LogLikelihood(observations, means, covariances, weights);
+    l = LogLikelihood(observations, dists, weights);
 
     iteration++;
   }
@@ -205,22 +201,21 @@ void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Estimate(
 template<typename InitialClusteringType, typename CovarianceConstraintPolicy>
 void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::
 InitialClustering(const arma::mat& observations,
-                  std::vector<arma::vec>& means,
-                  std::vector<arma::mat>& covariances,
+                  std::vector<distribution::GaussianDistribution>& dists,
                   arma::vec& weights)
 {
   // Assignments from clustering.
   arma::Col<size_t> assignments;
 
   // Run clustering algorithm.
-  clusterer.Cluster(observations, means.size(), assignments);
+  clusterer.Cluster(observations, dists.size(), assignments);
 
   // Now calculate the means, covariances, and weights.
   weights.zeros();
-  for (size_t i = 0; i < means.size(); ++i)
+  for (size_t i = 0; i < dists.size(); ++i)
   {
-    means[i].zeros();
-    covariances[i].zeros();
+    dists[i].Mean().zeros();
+    dists[i].Covariance().zeros();
   }
 
   // From the assignments, generate our means, covariances, and weights.
@@ -229,37 +224,35 @@ InitialClustering(const arma::mat& observations,
     const size_t cluster = assignments[i];
 
     // Add this to the relevant mean.
-    means[cluster] += observations.col(i);
+    dists[cluster].Mean() += observations.col(i);
 
     // Add this to the relevant covariance.
-//    covariances[cluster] += observations.col(i) * trans(observations.col(i));
+    dists[cluster].Covariance() += observations.col(i) *
+        trans(observations.col(i));
 
     // Now add one to the weights (we will normalize).
     weights[cluster]++;
   }
 
   // Now normalize the mean and covariance.
-  for (size_t i = 0; i < means.size(); ++i)
+  for (size_t i = 0; i < dists.size(); ++i)
   {
-//    covariances[i] -= means[i] * trans(means[i]);
-
-    means[i] /= (weights[i] > 1) ? weights[i] : 1;
-//    covariances[i] /= (weights[i] > 1) ? weights[i] : 1;
+    dists[i].Mean() /= (weights[i] > 1) ? weights[i] : 1;
   }
 
   for (size_t i = 0; i < observations.n_cols; ++i)
   {
     const size_t cluster = assignments[i];
-    const arma::vec normObs = observations.col(i) - means[cluster];
-    covariances[cluster] += normObs * normObs.t();
+    const arma::vec normObs = observations.col(i) - dists[cluster].Mean();
+    dists[cluster].Covariance() += normObs * normObs.t();
   }
 
-  for (size_t i = 0; i < means.size(); ++i)
+  for (size_t i = 0; i < dists.size(); ++i)
   {
-    covariances[i] /= (weights[i] > 1) ? weights[i] : 1;
+    dists[i].Covariance() /= (weights[i] > 1) ? weights[i] : 1;
 
     // Apply constraints to covariance matrix.
-    constraint.ApplyConstraint(covariances[i]);
+    constraint.ApplyConstraint(dists[i].Covariance());
   }
 
   // Finally, normalize weights.
@@ -269,20 +262,19 @@ InitialClustering(const arma::mat& observations,
 template<typename InitialClusteringType, typename CovarianceConstraintPolicy>
 double EMFit<InitialClusteringType, CovarianceConstraintPolicy>::LogLikelihood(
     const arma::mat& observations,
-    const std::vector<arma::vec>& means,
-    const std::vector<arma::mat>& covariances,
+    const std::vector<distribution::GaussianDistribution>& dists,
     const arma::vec& weights) const
 {
   double logLikelihood = 0;
 
   arma::vec phis;
-  arma::mat likelihoods(means.size(), observations.n_cols);
-  for (size_t i = 0; i < means.size(); ++i)
+  arma::mat likelihoods(dists.size(), observations.n_cols);
+  
+  for (size_t i = 0; i < dists.size(); ++i)
   {
-    phi(observations, means[i], covariances[i], phis);
+    dists[i].Probability(observations, phis);
     likelihoods.row(i) = weights(i) * trans(phis);
   }
-
   // Now sum over every point.
   for (size_t j = 0; j < observations.n_cols; ++j)
   {
@@ -291,7 +283,7 @@ double EMFit<InitialClusteringType, CovarianceConstraintPolicy>::LogLikelihood(
           << "outlier." << std::endl;
     logLikelihood += log(accu(likelihoods.col(j)));
   }
-
+  
   return logLikelihood;
 }
 

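For reference, the removed phi() helper is subsumed by GaussianDistribution::Probability(), which writes one density per observation column; the E-step hunks above then scale those densities by the component's mixing weight. A standalone sketch (the two-dimensional distribution, random data, and 0.5 weight below are illustrative assumptions only):

    #include <mlpack/core.hpp>

    using namespace mlpack;

    int main()
    {
      // Standard 2-D Gaussian: zero mean, identity covariance (assuming the
      // dimensionality constructor).
      distribution::GaussianDistribution g(2);

      // 100 random observations, one per column.
      arma::mat observations = arma::randn<arma::mat>(2, 100);

      // Replaces the old phi(observations, mean, covariance, probs) call:
      // one density value is written per observation column.
      arma::vec probs;
      g.Probability(observations, probs);

      // In the EM E-step these densities are then scaled by the component's
      // mixing weight.
      probs *= 0.5;

      return 0;
    }
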
-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/mlpack.git


