[mlpack-git] master: Add tests for new Train() functions. (5866674)

gitdub at big.cc.gt.atl.ga.us gitdub at big.cc.gt.atl.ga.us
Tue Sep 29 09:34:00 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/cbeb3ea17262b7c5115247dc217e316c529249b7...f85a9b22f3ce56143943a2488c05c2810d6b2bf3

>---------------------------------------------------------------

commit 58666745b832274f1d9bf7a14564cb967174d591
Author: Ryan Curtin <ryan at ratml.org>
Date:   Mon Sep 28 17:22:43 2015 -0400

    Add tests for new Train() functions.


>---------------------------------------------------------------

58666745b832274f1d9bf7a14564cb967174d591
 src/mlpack/tests/softmax_regression_test.cpp | 31 ++++++++++++++++++----------
 1 file changed, 20 insertions(+), 11 deletions(-)

diff --git a/src/mlpack/tests/softmax_regression_test.cpp b/src/mlpack/tests/softmax_regression_test.cpp
index 18bf3f5..e1e57c7 100644
--- a/src/mlpack/tests/softmax_regression_test.cpp
+++ b/src/mlpack/tests/softmax_regression_test.cpp
@@ -353,8 +353,17 @@ BOOST_AUTO_TEST_CASE(SoftmaxRegressionTrainTest)
     labels[i] = 0.0;
   for (size_t i = 500; i < 1000; ++i)
     labels[i] = 1.0;
-  SoftmaxRegression<> sr(dataset, labels, dataset.n_rows, 2);
+
+
+  // This should be the same as the default parameters given by
+  // SoftmaxRegression.
+  SoftmaxRegressionFunction srf(dataset, labels, dataset.n_rows, 2, 0.0001,
+      false);
+  L_BFGS<SoftmaxRegressionFunction> lbfgs(srf);
+  SoftmaxRegression<> sr(lbfgs);
+
   SoftmaxRegression<> sr2(dataset.n_rows, 2);
+  sr2.Parameters() = srf.GetInitialPoint(); // Start from the same place.
   sr2.Train(dataset, labels, 2);
 
   // Ensure that the parameters are the same.
@@ -362,10 +371,10 @@ BOOST_AUTO_TEST_CASE(SoftmaxRegressionTrainTest)
   BOOST_REQUIRE_EQUAL(sr.Parameters().n_cols, sr2.Parameters().n_cols);
   for (size_t i = 0; i < sr.Parameters().n_elem; ++i)
   {
-    if (std::abs(sr.Parameters()[i]) < 1e-5)
-      BOOST_REQUIRE_SMALL(sr2.Parameters()[i], 1e-5);
+    if (std::abs(sr.Parameters()[i]) < 1e-4)
+      BOOST_REQUIRE_SMALL(sr2.Parameters()[i], 1e-4);
     else
-      BOOST_REQUIRE_CLOSE(sr.Parameters()[i], sr2.Parameters()[i], 1e-5);
+      BOOST_REQUIRE_CLOSE(sr.Parameters()[i], sr2.Parameters()[i], 1e-4);
   }
 }
 
@@ -381,22 +390,22 @@ BOOST_AUTO_TEST_CASE(SoftmaxRegressionOptimizerTrainTest)
 
   SoftmaxRegressionFunction srf(dataset, labels, dataset.n_rows, 2, 0.01, true);
   L_BFGS<SoftmaxRegressionFunction> lbfgs(srf);
-
   SoftmaxRegression<> sr(lbfgs);
-  SoftmaxRegression<> sr2(dataset.n_rows, 2);
-  sr2.Train(lbfgs);
+
+  SoftmaxRegression<> sr2(dataset.n_rows, 2, true);
+  L_BFGS<SoftmaxRegressionFunction> lbfgs2(srf);
+  sr2.Train(lbfgs2);
 
   // Ensure that the parameters are the same.
   BOOST_REQUIRE_EQUAL(sr.Parameters().n_rows, sr2.Parameters().n_rows);
   BOOST_REQUIRE_EQUAL(sr.Parameters().n_cols, sr2.Parameters().n_cols);
   for (size_t i = 0; i < sr.Parameters().n_elem; ++i)
   {
-    if (std::abs(sr.Parameters()[i]) < 1e-5)
-      BOOST_REQUIRE_SMALL(sr2.Parameters()[i], 1e-5);
+    if (std::abs(sr.Parameters()[i]) < 0.01)
+      BOOST_REQUIRE_SMALL(sr2.Parameters()[i], 0.01);
     else
-      BOOST_REQUIRE_CLOSE(sr.Parameters()[i], sr2.Parameters()[i], 1e-5);
+      BOOST_REQUIRE_CLOSE(sr.Parameters()[i], sr2.Parameters()[i], 0.01);
   }
 }
 
-
 BOOST_AUTO_TEST_SUITE_END();
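
For reference, the two Train() overloads exercised by these tests can be used roughly as in the sketch below. This is a minimal illustration, not part of the commit: the header paths, namespaces, and the arma::vec label type are assumptions based on mlpack's layout at the time, while the constructor and Train() calls mirror the test code in the diff above.

    #include <mlpack/core.hpp>
    #include <mlpack/methods/softmax_regression/softmax_regression.hpp>
    #include <mlpack/core/optimizers/lbfgs/lbfgs.hpp>

    using namespace mlpack::regression;
    using namespace mlpack::optimization;

    int main()
    {
      // Random two-class data, mirroring the test setup.
      arma::mat dataset = arma::randu<arma::mat>(5, 1000);
      arma::vec labels(1000);
      for (size_t i = 0; i < 500; ++i)
        labels[i] = 0.0;
      for (size_t i = 500; i < 1000; ++i)
        labels[i] = 1.0;

      // New overload 1: train an existing model directly from data, labels,
      // and the number of classes.
      SoftmaxRegression<> sr(dataset.n_rows, 2);
      sr.Train(dataset, labels, 2);

      // New overload 2: train from a pre-constructed L-BFGS optimizer that
      // wraps a SoftmaxRegressionFunction.
      SoftmaxRegressionFunction srf(dataset, labels, dataset.n_rows, 2, 0.0001,
          false);
      L_BFGS<SoftmaxRegressionFunction> lbfgs(srf);
      SoftmaxRegression<> sr2(dataset.n_rows, 2);
      sr2.Train(lbfgs);

      return 0;
    }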


