[mlpack-svn] r17419 - mlpack/trunk/src/mlpack/tests

fastlab-svn at coffeetalk-1.cc.gatech.edu
Wed Nov 26 11:48:16 EST 2014


Author: rcurtin
Date: Wed Nov 26 11:48:15 2014
New Revision: 17419

Log:
Slightly widen tolerance.


Modified:
   mlpack/trunk/src/mlpack/tests/softmax_regression_test.cpp

Modified: mlpack/trunk/src/mlpack/tests/softmax_regression_test.cpp
==============================================================================
--- mlpack/trunk/src/mlpack/tests/softmax_regression_test.cpp	(original)
+++ mlpack/trunk/src/mlpack/tests/softmax_regression_test.cpp	Wed Nov 26 11:48:15 2014
@@ -22,40 +22,40 @@
   const size_t trials = 50;
   const size_t inputSize = 10;
   const size_t numClasses = 5;
-  
+
   // Initialize a random dataset.
   arma::mat data;
   data.randu(inputSize, points);
-  
+
   // Create random class labels.
   arma::vec labels(points);
   for(size_t i = 0; i < points; i++)
     labels(i) = math::RandInt(0, numClasses);
-  
+
   // Create a SoftmaxRegressionFunction. Regularization term ignored.
   SoftmaxRegressionFunction srf(data, labels, inputSize, numClasses, 0);
-  
+
   // Run a number of trials.
   for(size_t i = 0; i < trials; i++)
   {
     // Create a random set of parameters.
     arma::mat parameters;
     parameters.randu(numClasses, inputSize);
-    
+
     double logLikelihood = 0;
-    
+
     // Compute error for each training example.
     for(size_t j = 0; j < points; j++)
     {
       arma::mat hypothesis, probabilities;
-      
+
       hypothesis = arma::exp(parameters * data.col(j));
       probabilities = hypothesis / arma::accu(hypothesis);
-      
+
       logLikelihood += log(probabilities(labels(j), 0));
     }
     logLikelihood /= points;
-    
+
     // Compare with the value returned by the function.
     BOOST_REQUIRE_CLOSE(srf.Evaluate(parameters), -logLikelihood, 1e-5);
   }
@@ -67,11 +67,11 @@
   const size_t trials = 50;
   const size_t inputSize = 10;
   const size_t numClasses = 5;
-  
+
   // Initialize a random dataset.
   arma::mat data;
   data.randu(inputSize, points);
-  
+
   // Create random class labels.
   arma::vec labels(points);
   for(size_t i = 0; i < points; i++)
@@ -81,7 +81,7 @@
   SoftmaxRegressionFunction srfNoReg(data, labels, inputSize, numClasses, 0);
   SoftmaxRegressionFunction srfSmallReg(data, labels, inputSize, numClasses, 1);
   SoftmaxRegressionFunction srfBigReg(data, labels, inputSize, numClasses, 20);
-  
+
   // Run a number of trials.
   for (size_t i = 0; i < trials; i++)
   {
@@ -108,21 +108,21 @@
   const size_t points = 1000;
   const size_t inputSize = 10;
   const size_t numClasses = 5;
-  
+
   // Initialize a random dataset.
   arma::mat data;
   data.randu(inputSize, points);
-  
+
   // Create random class labels.
   arma::vec labels(points);
   for(size_t i = 0; i < points; i++)
     labels(i) = math::RandInt(0, numClasses);
-    
+
   // 2 objects for 2 terms in the cost function. Each term contributes towards
   // the gradient and thus need to be checked independently.
   SoftmaxRegressionFunction srf1(data, labels, inputSize, numClasses, 0);
   SoftmaxRegressionFunction srf2(data, labels, inputSize, numClasses, 20);
-  
+
   // Create a random set of parameters.
   arma::mat parameters;
   parameters.randu(numClasses, inputSize);
@@ -136,7 +136,7 @@
   const double epsilon = 0.0001;
   double costPlus1, costMinus1, numGradient1;
   double costPlus2, costMinus2, numGradient2;
-  
+
   // For each parameter.
   for (size_t i = 0; i < numClasses; i++)
   {
@@ -179,7 +179,7 @@
 
   arma::mat data(inputSize, points);
   arma::vec labels(points);
-  
+
   for (size_t i = 0; i < points/2; i++)
   {
     data.col(i) = g1.Random();
@@ -196,7 +196,7 @@
 
   // Compare training accuracy to 100.
   const double acc = sr.ComputeAccuracy(data, labels);
-  BOOST_REQUIRE_CLOSE(acc, 100.0, 0.3);
+  BOOST_REQUIRE_CLOSE(acc, 100.0, 0.5);
 
   // Create test dataset.
   for (size_t i = 0; i < points/2; i++)
@@ -232,7 +232,7 @@
 
   arma::mat data(inputSize, points);
   arma::vec labels(points);
-  
+
   for (size_t i = 0; i < points/5; i++)
   {
     data.col(i) = g1.Random();

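For context on the actual change: BOOST_REQUIRE_CLOSE interprets its third argument as a percentage tolerance, so the accuracy check now accepts a training accuracy within 0.5% of 100 rather than 0.3%. A minimal standalone sketch of that semantics (the accuracy value below is made up for illustration and is not taken from the test):

  #define BOOST_TEST_MODULE ToleranceExample
  #include <boost/test/included/unit_test.hpp>

  BOOST_AUTO_TEST_CASE(PercentageToleranceExample)
  {
    // BOOST_REQUIRE_CLOSE(a, b, tol) passes when the relative difference
    // between a and b is at most tol percent.
    const double acc = 99.6;  // hypothetical accuracy: 996 of 1000 correct
    BOOST_REQUIRE_CLOSE(acc, 100.0, 0.5);  // passes: 99.6 deviates by 0.4%
    // The previous tolerance of 0.3 would have rejected this value, since a
    // 0.4% deviation exceeds 0.3%.
  }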

