[mlpack-svn] r15218 - mlpack/trunk/src/mlpack/tests

fastlab-svn at coffeetalk-1.cc.gatech.edu
Mon Jun 10 11:18:52 EDT 2013


Author: rcurtin
Date: 2013-06-10 11:18:52 -0400 (Mon, 10 Jun 2013)
New Revision: 15218

Modified:
   mlpack/trunk/src/mlpack/tests/gmm_test.cpp
Log:
Fix unused variable warnings.  Also seed the random number generator in
GMMTrainEMSingleGaussianWithProbability and tighten that test's tolerances.
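
The warnings came from storing the return value of GMM<>::Estimate() in a
local variable named likelihood that the tests never read; those calls now
simply discard the result.  A minimal sketch of the two forms (data stands for
whatever observation matrix the test builds):

    GMM<> gmm(1, 2);          // 1 component, 2 dimensions
    gmm.Estimate(data, 10);   // train; the returned likelihood is discarded

    // If the fitted likelihood is actually needed, keep the return value:
    // double likelihood = gmm.Estimate(data, 10);

A short note on the tightened BOOST_REQUIRE_CLOSE tolerances follows the
patch below.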


Modified: mlpack/trunk/src/mlpack/tests/gmm_test.cpp
===================================================================
--- mlpack/trunk/src/mlpack/tests/gmm_test.cpp	2013-06-08 17:59:38 UTC (rev 15217)
+++ mlpack/trunk/src/mlpack/tests/gmm_test.cpp	2013-06-10 15:18:52 UTC (rev 15218)
@@ -201,7 +201,7 @@
 
     // Now, train the model.
     GMM<> gmm(1, 2);
-    double likelihood = gmm.Estimate(data, 10);
+    gmm.Estimate(data, 10);
 
     arma::vec actualMean = arma::mean(data, 1);
     arma::mat actualCovar = ccov(data, 1 /* biased estimator */);
@@ -291,7 +291,7 @@
 
   // Now train the model.
   GMM<> gmm(gaussians, dims);
-  double likelihood = gmm.Estimate(data, 10);
+  gmm.Estimate(data, 10);
 
   arma::uvec sortRef = sort_index(weights);
   arma::uvec sortTry = sort_index(gmm.Weights());
@@ -322,6 +322,9 @@
  */
 BOOST_AUTO_TEST_CASE(GMMTrainEMSingleGaussianWithProbability)
 {
+  // Seed the random number generator, so different observations are drawn on
+  // each run.
+  math::RandomSeed(std::time(NULL));
+
   // Generate observations from a Gaussian distribution.
   distribution::GaussianDistribution d("0.5 1.0", "1.0 0.3; 0.3 1.0");
 
@@ -334,18 +336,18 @@
 
   // Now train the model.
   GMM<> g(1, 2);
-  double likelihood = g.Estimate(observations, probabilities, 10);
+  g.Estimate(observations, probabilities, 10);
 
-  // Check that it is trained correctly.  7% tolerance because of random error
+  // Check that it is trained correctly.  4% tolerance because of random error
   // present in observations.
-  BOOST_REQUIRE_CLOSE(g.Means()[0][0], 0.5, 7.0);
-  BOOST_REQUIRE_CLOSE(g.Means()[0][1], 1.0, 7.0);
+  BOOST_REQUIRE_CLOSE(g.Means()[0][0], 0.5, 4.0);
+  BOOST_REQUIRE_CLOSE(g.Means()[0][1], 1.0, 4.0);
 
-  // 9% tolerance on the large numbers, 12% on the smaller numbers.
+  // 4% tolerance on the large numbers, 6% on the smaller numbers.
-  BOOST_REQUIRE_CLOSE(g.Covariances()[0](0, 0), 1.0, 9.0);
-  BOOST_REQUIRE_CLOSE(g.Covariances()[0](0, 1), 0.3, 12.0);
-  BOOST_REQUIRE_CLOSE(g.Covariances()[0](1, 0), 0.3, 12.0);
-  BOOST_REQUIRE_CLOSE(g.Covariances()[0](1, 1), 1.0, 9.0);
+  BOOST_REQUIRE_CLOSE(g.Covariances()[0](0, 0), 1.0, 4.0);
+  BOOST_REQUIRE_CLOSE(g.Covariances()[0](0, 1), 0.3, 6.0);
+  BOOST_REQUIRE_CLOSE(g.Covariances()[0](1, 0), 0.3, 6.0);
+  BOOST_REQUIRE_CLOSE(g.Covariances()[0](1, 1), 1.0, 4.0);
 
   BOOST_REQUIRE_CLOSE(g.Weights()[0], 1.0, 1e-5);
 }
@@ -407,7 +409,7 @@
   // Now train the model.
   GMM<> g(4, 3); // 3 dimensions, 4 components.
 
-  double likelihood = g.Estimate(points, probabilities, 8);
+  g.Estimate(points, probabilities, 8);
 
   // Now check the results.  We need to order by weights so that when we do the
   // checking, things will be correct.
@@ -487,7 +489,7 @@
 
   // A new one which we'll train.
   GMM<> gmm2(2, 2);
-  double likelihood = gmm2.Estimate(observations, 10);
+  gmm2.Estimate(observations, 10);
 
   // Now check the results.  We need to order by weights so that when we do the
   // checking, things will be correct.
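
A note on the tolerance changes above: BOOST_REQUIRE_CLOSE takes its third
argument as a percentage, so the estimated means and diagonal covariance
entries must now land within 4% of the true values, and the off-diagonal
entries within 6% (down from 7%, 9%, and 12%).  Roughly, as a sketch rather
than Boost's exact formula:

    // BOOST_REQUIRE_CLOSE(observed, expected, tol) passes when the relative
    // difference is at most tol percent, i.e. approximately
    //   |observed - expected| <= (tol / 100) * |expected|
    // For example, an estimated mean of 0.52 against the true 0.5 is a 4%
    // relative error, right at the tightened 4.0 tolerance.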



