[mlpack-svn] r16968 - mlpack/trunk/src/mlpack/tests

fastlab-svn at coffeetalk-1.cc.gatech.edu
Tue Aug 5 09:44:21 EDT 2014


Author: michaelfox99
Date: Tue Aug  5 09:44:21 2014
New Revision: 16968

Log:
Updated the Save and Load tests, and updated the remaining tests for the hierarchical GMM format


Modified:
   mlpack/trunk/src/mlpack/tests/gmm_test.cpp
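
The change replaces the old parallel accessors gmm.Means()[i] and
gmm.Covariances()[i] with a single hierarchical accessor, gmm.Component(i),
which returns a GaussianDistribution; the standalone phi() tests go away
because the density is presumably now evaluated through the distribution
objects and GMM::Probability(). A minimal sketch of the new usage, put
together from the calls exercised in the diff below (include paths and
namespaces are assumed to match mlpack trunk at this revision, so treat it as
illustrative rather than authoritative):

#include <mlpack/core.hpp>
#include <mlpack/methods/gmm/gmm.hpp>
#include <iostream>

using namespace mlpack;
using namespace mlpack::gmm;

int main()
{
  // Two components in two dimensions, mirroring GMMProbabilityTest.
  GMM<> gmm(2, 2);

  // Old interface (removed):  gmm.Means()[0] = "0 0";
  //                           gmm.Covariances()[0] = "1 0; 0 1";
  // New interface: each component is a full GaussianDistribution.
  gmm.Component(0) = distribution::GaussianDistribution("0 0", "1 0; 0 1");
  gmm.Component(1) = distribution::GaussianDistribution("3 3", "2 1; 1 2");
  gmm.Weights() = "0.3 0.7";

  // Per-component parameters are reached through the distribution object.
  const arma::vec& mean = gmm.Component(1).Mean();
  const arma::mat& cov = gmm.Component(1).Covariance();
  std::cout << "component 1 mean:\n" << mean << "covariance:\n" << cov;

  // Mixture density of a single observation (GMM::Probability() is unchanged).
  std::cout << "p([0 0]) = " << gmm.Probability(arma::vec("0 0")) << std::endl;

  return 0;
}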

Modified: mlpack/trunk/src/mlpack/tests/gmm_test.cpp
==============================================================================
--- mlpack/trunk/src/mlpack/tests/gmm_test.cpp	(original)
+++ mlpack/trunk/src/mlpack/tests/gmm_test.cpp	Tue Aug  5 09:44:21 2014
@@ -1,13 +1,13 @@
 /**
  * @file gmm_test.cpp
  * @author Ryan Curtin
+ * @author Michael Fox
  *
  * Test for the Gaussian Mixture Model class.
  */
 #include <mlpack/core.hpp>
 
 #include <mlpack/methods/gmm/gmm.hpp>
-#include <mlpack/methods/gmm/phi.hpp>
 
 #include <mlpack/methods/gmm/no_constraint.hpp>
 #include <mlpack/methods/gmm/positive_definite_constraint.hpp>
@@ -22,118 +22,14 @@
 
 BOOST_AUTO_TEST_SUITE(GMMTest);
 /**
- * Test the phi() function, in the univariate Gaussian case.
- */
-BOOST_AUTO_TEST_CASE(UnivariatePhiTest)
-{
-  // Simple case.
-  BOOST_REQUIRE_CLOSE(phi(0.0, 0.0, 1.0), 0.398942280401433, 1e-5);
-
-  // A few more cases...
-  BOOST_REQUIRE_CLOSE(phi(0.0, 0.0, 2.0), 0.282094791773878, 1e-5);
-
-  BOOST_REQUIRE_CLOSE(phi(1.0, 0.0, 1.0), 0.241970724519143, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(-1.0, 0.0, 1.0), 0.241970724519143, 1e-5);
-
-  BOOST_REQUIRE_CLOSE(phi(1.0, 0.0, 2.0), 0.219695644733861, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(-1.0, 0.0, 2.0), 0.219695644733861, 1e-5);
-
-  BOOST_REQUIRE_CLOSE(phi(1.0, 1.0, 1.0), 0.398942280401433, 1e-5);
-
-  BOOST_REQUIRE_CLOSE(phi(-1.0, 1.0, 2.0), 0.103776874355149, 1e-5);
-}
-
-/**
- * Test the phi() function, in the multivariate Gaussian case.
- */
-BOOST_AUTO_TEST_CASE(MultivariatePhiTest)
-{
-  // Simple case.
-  arma::vec mean = "0 0";
-  arma::mat cov = "1 0; 0 1";
-  arma::vec x = "0 0";
-
-  BOOST_REQUIRE_CLOSE(phi(x, mean, cov), 0.159154943091895, 1e-5);
-
-  cov = "2 0; 0 2";
-
-  BOOST_REQUIRE_CLOSE(phi(x, mean, cov), 0.0795774715459477, 1e-5);
-
-  x = "1 1";
-
-  BOOST_REQUIRE_CLOSE(phi(x, mean, cov), 0.0482661763150270, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(-x, mean, cov), 0.0482661763150270, 1e-5);
-
-  mean = "1 1";
-
-  BOOST_REQUIRE_CLOSE(phi(x, mean, cov), 0.0795774715459477, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(-x, -mean, cov), 0.0795774715459477, 1e-5);
-
-  cov = "2 1.5; 1 4";
-
-  BOOST_REQUIRE_CLOSE(phi(x, mean, cov), 0.0624257046546403, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(-x, -mean, cov), 0.0624257046546403, 1e-5);
-
-  x = "-1 4";
-
-  BOOST_REQUIRE_CLOSE(phi(x, mean, cov), 0.00144014867515135, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(-x, mean, cov), 0.00133352162064845, 1e-5);
-
-  // Higher-dimensional case.
-  x = "0 1 2 3 4";
-  mean = "5 6 3 3 2";
-  cov = "6 1 1 0 2;"
-        "0 7 1 0 1;"
-        "1 1 4 1 1;"
-        "1 0 1 7 0;"
-        "2 0 1 1 6";
-
-  BOOST_REQUIRE_CLOSE(phi(x, mean, cov), 1.02531207499358e-6, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(-x, -mean, cov), 1.02531207499358e-6, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(x, -mean, cov), 1.06784794079363e-8, 1e-5);
-  BOOST_REQUIRE_CLOSE(phi(-x, mean, cov), 1.06784794079363e-8, 1e-5);
-}
-
-/**
- * Test the phi() function, for multiple points in the multivariate Gaussian
- * case.
- */
-BOOST_AUTO_TEST_CASE(MultipointMultivariatePhiTest)
-{
-  // Same case as before.
-  arma::vec mean = "5 6 3 3 2";
-  arma::mat cov = "6 1 1 0 2; 0 7 1 0 1; 1 1 4 1 1; 1 0 1 7 0; 2 0 1 1 6";
-
-  arma::mat points = "0 3 2 2 3 4;"
-                     "1 2 2 1 0 0;"
-                     "2 3 0 5 5 6;"
-                     "3 7 8 0 1 1;"
-                     "4 8 1 1 0 0;";
-
-  arma::vec phis;
-  phi(points, mean, cov, phis);
-
-  BOOST_REQUIRE_EQUAL(phis.n_elem, 6);
-
-  BOOST_REQUIRE_CLOSE(phis(0), 1.02531207499358e-6, 1e-5);
-  BOOST_REQUIRE_CLOSE(phis(1), 1.82353695848039e-7, 1e-5);
-  BOOST_REQUIRE_CLOSE(phis(2), 1.29759261892949e-6, 1e-5);
-  BOOST_REQUIRE_CLOSE(phis(3), 1.33218060268258e-6, 1e-5);
-  BOOST_REQUIRE_CLOSE(phis(4), 1.12120427975708e-6, 1e-5);
-  BOOST_REQUIRE_CLOSE(phis(5), 4.57951032485297e-7, 1e-5);
-}
-
-/**
  * Test GMM::Probability() for a single observation for a few cases.
  */
 BOOST_AUTO_TEST_CASE(GMMProbabilityTest)
 {
   // Create a GMM.
   GMM<> gmm(2, 2);
-  gmm.Means()[0] = "0 0";
-  gmm.Means()[1] = "3 3";
-  gmm.Covariances()[0] = "1 0; 0 1";
-  gmm.Covariances()[1] = "2 1; 1 2";
+  gmm.Component(0) = distribution::GaussianDistribution("0 0", "1 0; 0 1");
+  gmm.Component(1) = distribution::GaussianDistribution("3 3", "2 1; 1 2");
   gmm.Weights() = "0.3 0.7";
 
   // Now test a couple observations.  These comparisons are calculated by hand.
@@ -153,10 +49,8 @@
 {
   // Create a GMM (same as the last test).
   GMM<> gmm(2, 2);
-  gmm.Means()[0] = "0 0";
-  gmm.Means()[1] = "3 3";
-  gmm.Covariances()[0] = "1 0; 0 1";
-  gmm.Covariances()[1] = "2 1; 1 2";
+  gmm.Component(0) = distribution::GaussianDistribution("0 0", "1 0; 0 1");
+  gmm.Component(1) = distribution::GaussianDistribution("3 3", "2 1; 1 2");
   gmm.Weights() = "0.3 0.7";
 
   // Now test a couple observations.  These comparisons are calculated by hand.
@@ -212,13 +106,17 @@
     arma::mat actualCovar = ccov(data, 1 /* biased estimator */);
 
     // Check the model to see that it is correct.
-    BOOST_REQUIRE_CLOSE((gmm.Means()[0])[0], actualMean(0), 1e-5);
-    BOOST_REQUIRE_CLOSE((gmm.Means()[0])[1], actualMean(1), 1e-5);
-
-    BOOST_REQUIRE_CLOSE((gmm.Covariances()[0])(0, 0), actualCovar(0, 0), 1e-5);
-    BOOST_REQUIRE_CLOSE((gmm.Covariances()[0])(0, 1), actualCovar(0, 1), 1e-5);
-    BOOST_REQUIRE_CLOSE((gmm.Covariances()[0])(1, 0), actualCovar(1, 0), 1e-5);
-    BOOST_REQUIRE_CLOSE((gmm.Covariances()[0])(1, 1), actualCovar(1, 1), 1e-5);
+    BOOST_REQUIRE_CLOSE(gmm.Component(0).Mean()[0], actualMean(0), 1e-5);
+    BOOST_REQUIRE_CLOSE(gmm.Component(0).Mean()[1], actualMean(1), 1e-5);
+
+    BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(0, 0),
+        actualCovar(0, 0), 1e-5);
+    BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(0, 1),
+        actualCovar(0, 1), 1e-5);
+    BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(1, 0),
+        actualCovar(1, 0), 1e-5);
+    BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(1, 1),
+        actualCovar(1, 1), 1e-5);
 
     BOOST_REQUIRE_CLOSE(gmm.Weights()[0], 1.0, 1e-5);
   }
@@ -306,13 +204,13 @@
   {
     // Check the mean.
     for (size_t j = 0; j < dims; j++)
-      BOOST_REQUIRE_CLOSE((gmm.Means()[sortTry[i]])[j],
+      BOOST_REQUIRE_CLOSE(gmm.Component(sortTry[i]).Mean()[j],
           (means[sortRef[i]])[j], 1e-5);
 
     // Check the covariance.
     for (size_t row = 0; row < dims; row++)
       for (size_t col = 0; col < dims; col++)
-        BOOST_REQUIRE_CLOSE((gmm.Covariances()[sortTry[i]])(row, col),
+        BOOST_REQUIRE_CLOSE(gmm.Component(sortTry[i]).Covariance()(row, col),
             (covars[sortRef[i]])(row, col), 1e-5);
 
     // Check the weight.
@@ -345,14 +243,14 @@
 
   // Check that it is trained correctly.  5% tolerance because of random error
   // present in observations.
-  BOOST_REQUIRE_CLOSE(g.Means()[0][0], 0.5, 5.0);
-  BOOST_REQUIRE_CLOSE(g.Means()[0][1], 1.0, 5.0);
+  BOOST_REQUIRE_CLOSE(g.Component(0).Mean()[0], 0.5, 5.0);
+  BOOST_REQUIRE_CLOSE(g.Component(0).Mean()[1], 1.0, 5.0);
 
   // 6% tolerance on the large numbers, 10% on the smaller numbers.
-  BOOST_REQUIRE_CLOSE(g.Covariances()[0](0, 0), 1.0, 6.0);
-  BOOST_REQUIRE_CLOSE(g.Covariances()[0](0, 1), 0.3, 10.0);
-  BOOST_REQUIRE_CLOSE(g.Covariances()[0](1, 0), 0.3, 10.0);
-  BOOST_REQUIRE_CLOSE(g.Covariances()[0](1, 1), 1.0, 6.0);
+  BOOST_REQUIRE_CLOSE(g.Component(0).Covariance()(0, 0), 1.0, 6.0);
+  BOOST_REQUIRE_CLOSE(g.Component(0).Covariance()(0, 1), 0.3, 10.0);
+  BOOST_REQUIRE_CLOSE(g.Component(0).Covariance()(1, 0), 0.3, 10.0);
+  BOOST_REQUIRE_CLOSE(g.Component(0).Covariance()(1, 1), 1.0, 6.0);
 
   BOOST_REQUIRE_CLOSE(g.Weights()[0], 1.0, 1e-5);
 }
@@ -427,45 +325,49 @@
   BOOST_REQUIRE_SMALL(g.Weights()[sortedIndices[0]] - 0.1, 0.1);
 
   for (size_t i = 0; i < 3; i++)
-    BOOST_REQUIRE_SMALL((g.Means()[sortedIndices[0]][i] - d4.Mean()[i]), 0.4);
+    BOOST_REQUIRE_SMALL((g.Component(sortedIndices[0]).Mean()[i]
+        - d4.Mean()[i]), 0.4);
 
   for (size_t row = 0; row < 3; row++)
     for (size_t col = 0; col < 3; col++)
-      BOOST_REQUIRE_SMALL((g.Covariances()[sortedIndices[0]](row, col) -
-          d4.Covariance()(row, col)), 0.60); // Big tolerance!  Lots of noise.
+      BOOST_REQUIRE_SMALL((g.Component(sortedIndices[0]).Covariance()(row, col)
+          - d4.Covariance()(row, col)), 0.60); // Big tolerance!  Lots of noise.
 
   // Second Gaussian (d1).
   BOOST_REQUIRE_SMALL(g.Weights()[sortedIndices[1]] - 0.2, 0.1);
 
   for (size_t i = 0; i < 3; i++)
-    BOOST_REQUIRE_SMALL((g.Means()[sortedIndices[1]][i] - d1.Mean()[i]), 0.4);
+    BOOST_REQUIRE_SMALL((g.Component(sortedIndices[1]).Mean()[i]
+        - d1.Mean()[i]), 0.4);
 
   for (size_t row = 0; row < 3; row++)
     for (size_t col = 0; col < 3; col++)
-      BOOST_REQUIRE_SMALL((g.Covariances()[sortedIndices[1]](row, col) -
-          d1.Covariance()(row, col)), 0.55); // Big tolerance!  Lots of noise.
+      BOOST_REQUIRE_SMALL((g.Component(sortedIndices[1]).Covariance()(row, col)
+          - d1.Covariance()(row, col)), 0.55); // Big tolerance!  Lots of noise.
 
   // Third Gaussian (d2).
   BOOST_REQUIRE_SMALL(g.Weights()[sortedIndices[2]] - 0.3, 0.1);
 
   for (size_t i = 0; i < 3; i++)
-    BOOST_REQUIRE_SMALL((g.Means()[sortedIndices[2]][i] - d2.Mean()[i]), 0.4);
+    BOOST_REQUIRE_SMALL((g.Component(sortedIndices[2]).Mean()[i]
+        - d2.Mean()[i]), 0.4);
 
   for (size_t row = 0; row < 3; row++)
     for (size_t col = 0; col < 3; col++)
-      BOOST_REQUIRE_SMALL((g.Covariances()[sortedIndices[2]](row, col) -
-          d2.Covariance()(row, col)), 0.50); // Big tolerance!  Lots of noise.
+      BOOST_REQUIRE_SMALL((g.Component(sortedIndices[2]).Covariance()(row, col)
+          - d2.Covariance()(row, col)), 0.50); // Big tolerance!  Lots of noise.
 
   // Fourth gaussian (d3).
   BOOST_REQUIRE_SMALL(g.Weights()[sortedIndices[3]] - 0.4, 0.1);
 
   for (size_t i = 0; i < 3; ++i)
-    BOOST_REQUIRE_SMALL((g.Means()[sortedIndices[3]][i] - d3.Mean()[i]), 0.4);
+    BOOST_REQUIRE_SMALL((g.Component(sortedIndices[3]).Mean()[i]
+        - d3.Mean()[i]), 0.4);
 
   for (size_t row = 0; row < 3; ++row)
     for (size_t col = 0; col < 3; ++col)
-      BOOST_REQUIRE_SMALL((g.Covariances()[sortedIndices[3]](row, col) -
-          d3.Covariance()(row, col)), 0.50);
+      BOOST_REQUIRE_SMALL((g.Component(sortedIndices[3]).Covariance()(row, col)
+          - d3.Covariance()(row, col)), 0.50);
 }
 
 /**
@@ -480,12 +382,13 @@
   gmm.Weights() = arma::vec("0.40 0.60");
 
   // N([2.25 3.10], [1.00 0.20; 0.20 0.89])
-  gmm.Means()[0] = arma::vec("2.25 3.10");
-  gmm.Covariances()[0] = arma::mat("1.00 0.60; 0.60 0.89");
+  gmm.Component(0) = distribution::GaussianDistribution("2.25 3.10",
+      "1.00 0.60; 0.60 0.89");
+
 
   // N([4.10 1.01], [1.00 0.00; 0.00 1.01])
-  gmm.Means()[1] = arma::vec("4.10 1.01");
-  gmm.Covariances()[1] = arma::mat("1.00 0.70; 0.70 1.01");
+  gmm.Component(1) = distribution::GaussianDistribution("4.10 1.01",
+      "1.00 0.70; 0.70 1.01");
 
   // Now generate a bunch of observations.
   arma::mat observations(2, 4000);
@@ -505,33 +408,33 @@
   BOOST_REQUIRE_CLOSE(gmm.Weights()[0], gmm2.Weights()[sortedIndices[0]], 7.0);
   BOOST_REQUIRE_CLOSE(gmm.Weights()[1], gmm2.Weights()[sortedIndices[1]], 7.0);
 
-  BOOST_REQUIRE_CLOSE(gmm.Means()[0][0], gmm2.Means()[sortedIndices[0]][0],
-      6.5);
-  BOOST_REQUIRE_CLOSE(gmm.Means()[0][1], gmm2.Means()[sortedIndices[0]][1],
-      6.5);
-
-  BOOST_REQUIRE_CLOSE(gmm.Covariances()[0](0, 0),
-      gmm2.Covariances()[sortedIndices[0]](0, 0), 13.0);
-  BOOST_REQUIRE_CLOSE(gmm.Covariances()[0](0, 1),
-      gmm2.Covariances()[sortedIndices[0]](0, 1), 22.0);
-  BOOST_REQUIRE_CLOSE(gmm.Covariances()[0](1, 0),
-      gmm2.Covariances()[sortedIndices[0]](1, 0), 22.0);
-  BOOST_REQUIRE_CLOSE(gmm.Covariances()[0](1, 1),
-      gmm2.Covariances()[sortedIndices[0]](1, 1), 13.0);
-
-  BOOST_REQUIRE_CLOSE(gmm.Means()[1][0], gmm2.Means()[sortedIndices[1]][0],
-      6.5);
-  BOOST_REQUIRE_CLOSE(gmm.Means()[1][1], gmm2.Means()[sortedIndices[1]][1],
-      6.5);
-
-  BOOST_REQUIRE_CLOSE(gmm.Covariances()[1](0, 0),
-      gmm2.Covariances()[sortedIndices[1]](0, 0), 13.0);
-  BOOST_REQUIRE_CLOSE(gmm.Covariances()[1](0, 1),
-      gmm2.Covariances()[sortedIndices[1]](0, 1), 22.0);
-  BOOST_REQUIRE_CLOSE(gmm.Covariances()[1](1, 0),
-      gmm2.Covariances()[sortedIndices[1]](1, 0), 22.0);
-  BOOST_REQUIRE_CLOSE(gmm.Covariances()[1](1, 1),
-      gmm2.Covariances()[sortedIndices[1]](1, 1), 13.0);
+  BOOST_REQUIRE_CLOSE(gmm.Component(0).Mean()[0],
+      gmm2.Component(sortedIndices[0]).Mean()[0], 6.5);
+  BOOST_REQUIRE_CLOSE(gmm.Component(0).Mean()[1],
+      gmm2.Component(sortedIndices[0]).Mean()[1], 6.5);
+
+  BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(0, 0),
+      gmm2.Component(sortedIndices[0]).Covariance()(0, 0), 13.0);
+  BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(0, 1),
+      gmm2.Component(sortedIndices[0]).Covariance()(0, 1), 22.0);
+  BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(1, 0),
+      gmm2.Component(sortedIndices[0]).Covariance()(1, 0), 22.0);
+  BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(1, 1),
+      gmm2.Component(sortedIndices[0]).Covariance()(1, 1), 13.0);
+
+  BOOST_REQUIRE_CLOSE(gmm.Component(1).Mean()[0],
+      gmm2.Component(sortedIndices[1]).Mean()[0], 6.5);
+  BOOST_REQUIRE_CLOSE(gmm.Component(1).Mean()[1],
+      gmm2.Component(sortedIndices[1]).Mean()[1], 6.5);
+
+  BOOST_REQUIRE_CLOSE(gmm.Component(1).Covariance()(0, 0),
+      gmm2.Component(sortedIndices[1]).Covariance()(0, 0), 13.0);
+  BOOST_REQUIRE_CLOSE(gmm.Component(1).Covariance()(0, 1),
+      gmm2.Component(sortedIndices[1]).Covariance()(0, 1), 22.0);
+  BOOST_REQUIRE_CLOSE(gmm.Component(1).Covariance()(1, 0),
+      gmm2.Component(sortedIndices[1]).Covariance()(1, 0), 22.0);
+  BOOST_REQUIRE_CLOSE(gmm.Component(1).Covariance()(1, 1),
+      gmm2.Component(sortedIndices[1]).Covariance()(1, 1), 13.0);
 }
 
 /**
@@ -541,13 +444,11 @@
 {
   // First create a Gaussian with a few components.
   GMM<> gmm(3, 2);
-  gmm.Means()[0] = "0 0";
-  gmm.Means()[1] = "1 3";
-  gmm.Means()[2] = "-2 -2";
-  gmm.Covariances()[0] = "1 0; 0 1";
-  gmm.Covariances()[1] = "3 2; 2 3";
-  gmm.Covariances()[2] = "2.2 1.4; 1.4 5.1";
-  gmm.Weights() = "0.6 0.25 0.15";
+  gmm.Component(0) = distribution::GaussianDistribution("0 0", "1 0; 0 1");
+  gmm.Component(1) = distribution::GaussianDistribution("1 3", "3 2; 2 3");
+  gmm.Component(2) = distribution::GaussianDistribution("-2 -2",
+      "2.2 1.4; 1.4 5.1");
+  gmm.Weights() = "0.6 0.25 0.15";
 
   arma::mat observations = arma::trans(arma::mat(
     " 0  0;"
@@ -592,8 +493,8 @@
 
   for (size_t i = 0; i < gmm.Gaussians(); ++i)
   {
-    gmm.Means()[i].randu();
-    gmm.Covariances()[i].randu();
+    gmm.Component(i).Mean().randu();
+    gmm.Component(i).Covariance().randu();
   }
 
   gmm.Save("test-gmm-save.xml");
@@ -613,14 +514,15 @@
   for (size_t i = 0; i < gmm.Gaussians(); ++i)
   {
     for (size_t j = 0; j < gmm.Dimensionality(); ++j)
-      BOOST_REQUIRE_CLOSE(gmm.Means()[i][j], gmm2.Means()[i][j], 1e-3);
+      BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j],
+          gmm2.Component(i).Mean()[j], 1e-3);
 
     for (size_t j = 0; j < gmm.Dimensionality(); ++j)
     {
       for (size_t k = 0; k < gmm.Dimensionality(); ++k)
       {
-        BOOST_REQUIRE_CLOSE(gmm.Covariances()[i](j, k),
-            gmm2.Covariances()[i](j, k), 1e-3);
+        BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k),
+            gmm2.Component(i).Covariance()(j, k), 1e-3);
       }
     }
   }
@@ -784,11 +686,12 @@
 
     for (size_t j = 0; j < gmm.Dimensionality(); ++j)
     {
-      BOOST_REQUIRE_CLOSE(gmm.Means()[i][j], oldgmm.Means()[i][j], 1e-3);
+      BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j],
+                          oldgmm.Component(i).Mean()[j], 1e-3);
 
       for (size_t k = 0; k < gmm.Dimensionality(); ++k)
-        BOOST_REQUIRE_CLOSE(gmm.Covariances()[i](j, k),
-                            oldgmm.Covariances()[i](j, k), 1e-3);
+        BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k),
+                            oldgmm.Component(i).Covariance()(j, k), 1e-3);
     }
   }
 
@@ -805,11 +708,12 @@
 
     for (size_t j = 0; j < gmm.Dimensionality(); ++j)
     {
-      BOOST_REQUIRE_CLOSE(gmm.Means()[i][j], oldgmm.Means()[i][j], 1e-3);
+      BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j],
+                          oldgmm.Component(i).Mean()[j], 1e-3);
 
       for (size_t k = 0; k < gmm.Dimensionality(); ++k)
-        BOOST_REQUIRE_CLOSE(gmm.Covariances()[i](j, k),
-                            oldgmm.Covariances()[i](j, k), 1e-3);
+        BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k),
+                            oldgmm.Component(i).Covariance()(j, k), 1e-3);
     }
   }
 
@@ -828,11 +732,12 @@
 
     for (size_t j = 0; j < gmm.Dimensionality(); ++j)
     {
-      BOOST_REQUIRE_CLOSE(gmm.Means()[i][j], oldgmm.Means()[i][j], 1e-3);
+      BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j],
+                          oldgmm.Component(i).Mean()[j], 1e-3);
 
       for (size_t k = 0; k < gmm.Dimensionality(); ++k)
-        BOOST_REQUIRE_CLOSE(gmm.Covariances()[i](j, k),
-                            oldgmm.Covariances()[i](j, k), 1e-3);
+        BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k),
+                            oldgmm.Component(i).Covariance()(j, k), 1e-3);
     }
   }
 
@@ -847,13 +752,15 @@
 
     for (size_t j = 0; j < gmm.Dimensionality(); ++j)
     {
-      BOOST_REQUIRE_CLOSE(gmm.Means()[i][j], oldgmm.Means()[i][j], 1e-3);
+      BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j],
+                          oldgmm.Component(i).Mean()[j], 1e-3);
 
       for (size_t k = 0; k < gmm.Dimensionality(); ++k)
-        BOOST_REQUIRE_CLOSE(gmm.Covariances()[i](j, k),
-                            oldgmm.Covariances()[i](j, k), 1e-3);
+        BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k),
+                            oldgmm.Component(i).Covariance()(j, k), 1e-3);
     }
   }
 }
 
+
 BOOST_AUTO_TEST_SUITE_END();
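
Since the log message calls out the Save and Load tests: the round trip they
exercise looks roughly like the sketch below. GMM::Save() and the XML filename
are taken from the diff above; GMM::Load() is assumed to be its counterpart,
and the helper function name and mixture sizes are illustrative only.

// Continues with the includes and using-directives from the earlier sketch.
void SaveLoadRoundTrip()
{
  GMM<> gmm(10, 4);  // sizes are illustrative, not taken from the test
  for (size_t i = 0; i < gmm.Gaussians(); ++i)
  {
    gmm.Component(i).Mean().randu();
    gmm.Component(i).Covariance().randu();
  }
  gmm.Weights().randu();
  gmm.Weights() /= arma::accu(gmm.Weights());  // keep the weights summing to 1

  gmm.Save("test-gmm-save.xml");

  // Load into a second model; GMM::Load(filename) is assumed to mirror
  // GMM::Save(filename).
  GMM<> gmm2(10, 4);
  gmm2.Load("test-gmm-save.xml");

  // gmm2.Component(i).Mean() and gmm2.Component(i).Covariance() should now
  // match gmm's parameters to within the test's 1e-3 relative tolerance.
}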


