[mlpack-git] master, mlpack-1.0.x: Adding tests for Reg SVD. (273d61c)

gitdub at big.cc.gt.atl.ga.us
Thu Mar 5 21:54:02 EST 2015


Repository : https://github.com/mlpack/mlpack

On branches: master, mlpack-1.0.x
Link       : https://github.com/mlpack/mlpack/compare/904762495c039e345beba14c1142fd719b3bd50e...f94823c800ad6f7266995c700b1b630d5ffdcf40

>---------------------------------------------------------------

commit 273d61c6d40ce00cade7d2ea41aa803e6748ade4
Author: Siddharth Agrawal <siddharth.950 at gmail.com>
Date:   Tue Jul 22 17:54:24 2014 +0000

    Adding tests for Reg SVD.


>---------------------------------------------------------------

273d61c6d40ce00cade7d2ea41aa803e6748ade4
 .../regularized_svd/regularized_svd_function.cpp   |   8 +-
 src/mlpack/tests/CMakeLists.txt                    |   1 +
 src/mlpack/tests/regularized_svd_test.cpp          | 223 +++++++++++++++++++++
 3 files changed, 228 insertions(+), 4 deletions(-)
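
For context, the factor of 2 added to the gradient in
regularized_svd_function.cpp below follows from differentiating the
per-rating objective that the new tests verify (a sketch, assuming the
per-rating cost is the squared rating error plus an L2 penalty on the two
parameter columns involved, which is the form the tests check):

    f_{ui} = (r_{ui} - p_u^T q_i)^2 + \lambda (\|p_u\|^2 + \|q_i\|^2)

    \partial f_{ui} / \partial p_u = -2 (r_{ui} - p_u^T q_i) q_i + 2 \lambda p_u
                                   = 2 (\lambda p_u - e_{ui} q_i)

    \partial f_{ui} / \partial q_i = 2 (\lambda q_i - e_{ui} p_u)

where e_{ui} = r_{ui} - p_u^T q_i is the ratingError term in the code, so both
column updates pick up the overall factor of 2 that the hunk below adds.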

diff --git a/src/mlpack/methods/regularized_svd/regularized_svd_function.cpp b/src/mlpack/methods/regularized_svd/regularized_svd_function.cpp
index 55222aa..f3fca65 100644
--- a/src/mlpack/methods/regularized_svd/regularized_svd_function.cpp
+++ b/src/mlpack/methods/regularized_svd/regularized_svd_function.cpp
@@ -109,10 +109,10 @@ void RegularizedSVDFunction::Gradient(const arma::mat& parameters,
 
     // Gradient is non-zero only for the parameter columns corresponding to the
     // example.
-    gradient.col(user) += lambda * parameters.col(user) -
-                          ratingError * parameters.col(item);
-    gradient.col(item) += lambda * parameters.col(item) -
-                          ratingError * parameters.col(user);
+    gradient.col(user) += 2 * (lambda * parameters.col(user) -
+                               ratingError * parameters.col(item));
+    gradient.col(item) += 2 * (lambda * parameters.col(item) -
+                               ratingError * parameters.col(user));
   }
 }
 
diff --git a/src/mlpack/tests/CMakeLists.txt b/src/mlpack/tests/CMakeLists.txt
index 5200932..5574715 100644
--- a/src/mlpack/tests/CMakeLists.txt
+++ b/src/mlpack/tests/CMakeLists.txt
@@ -40,6 +40,7 @@ add_executable(mlpack_test
   radical_test.cpp
   range_search_test.cpp
   rectangle_tree_test.cpp
+  regularized_svd_test.cpp
   sa_test.cpp
   save_restore_utility_test.cpp
   sgd_test.cpp
diff --git a/src/mlpack/tests/regularized_svd_test.cpp b/src/mlpack/tests/regularized_svd_test.cpp
new file mode 100644
index 0000000..75fdd20
--- /dev/null
+++ b/src/mlpack/tests/regularized_svd_test.cpp
@@ -0,0 +1,223 @@
+/**
+ * @file regularized_svd_test.cpp
+ * @author Siddharth Agrawal
+ *
+ * Test the RegularizedSVDFunction class.
+ */
+#include <mlpack/core.hpp>
+#include <mlpack/methods/regularized_svd/regularized_svd.hpp>
+
+#include <boost/test/unit_test.hpp>
+#include "old_boost_test_definitions.hpp"
+
+using namespace mlpack::svd;
+
+BOOST_AUTO_TEST_SUITE(RegularizedSVDTest);
+
+BOOST_AUTO_TEST_CASE(RegularizedSVDFunctionRandomEvaluate)
+{
+  // Define useful constants.
+  const size_t numUsers = 100;
+  const size_t numItems = 100;
+  const size_t numRatings = 1000;
+  const size_t maxRating = 5;
+  const size_t rank = 10;
+  const size_t numTrials = 50;
+  
+  // Make a random rating dataset.
+  arma::mat data = arma::randu(3, numRatings);
+  data.row(0) = floor(data.row(0) * numUsers);
+  data.row(1) = floor(data.row(1) * numItems);
+  data.row(2) = floor(data.row(2) * maxRating + 0.5);
+  
+  // Make a RegularizedSVDFunction with zero regularization.
+  RegularizedSVDFunction rSVDFunc(data, rank, 0);
+  
+  for(size_t i = 0; i < numTrials; i++)
+  {
+    arma::mat parameters = arma::randu(rank, numUsers + numItems);
+    
+    // Calculate cost by summing up cost of each example.
+    double cost = 0;
+    for(size_t j = 0; j < numRatings; j++)
+    {
+      const size_t user = data(0, j);
+      const size_t item = data(1, j) + numUsers;
+      
+      const double rating = data(2, j);
+      double ratingError = rating - arma::dot(parameters.col(user),
+                                              parameters.col(item));
+      double ratingErrorSquared = ratingError * ratingError;
+      
+      cost += ratingErrorSquared;
+    }
+    
+    // Compare calculated cost and value obtained using Evaluate().
+    BOOST_REQUIRE_CLOSE(cost, rSVDFunc.Evaluate(parameters), 1e-5);
+  }
+}
+
+BOOST_AUTO_TEST_CASE(RegularizedSVDFunctionRegularizationEvaluate)
+{
+  // Define useful constants.
+  const size_t numUsers = 100;
+  const size_t numItems = 100;
+  const size_t numRatings = 1000;
+  const size_t maxRating = 5;
+  const size_t rank = 10;
+  const size_t numTrials = 50;
+  
+  // Make a random rating dataset.
+  arma::mat data = arma::randu(3, numRatings);
+  data.row(0) = floor(data.row(0) * numUsers);
+  data.row(1) = floor(data.row(1) * numItems);
+  data.row(2) = floor(data.row(2) * maxRating + 0.5);
+  
+  // Make three RegularizedSVDFunction objects with different amounts of
+  // regularization.
+  RegularizedSVDFunction rSVDFuncNoReg(data, rank, 0);
+  RegularizedSVDFunction rSVDFuncSmallReg(data, rank, 0.5);
+  RegularizedSVDFunction rSVDFuncBigReg(data, rank, 20);
+  
+  for(size_t i = 0; i < numTrials; i++)
+  {
+    arma::mat parameters = arma::randu(rank, numUsers + numItems);
+    
+    // Calculate the regularization contributions of parameters corresponding to
+    // each rating and sum them up.
+    double smallRegTerm = 0;
+    double bigRegTerm = 0;
+    for(size_t j = 0; j < numRatings; j++)
+    {
+      const size_t user = data(0, j);
+      const size_t item = data(1, j) + numUsers;
+      
+      double userVecNorm = arma::norm(parameters.col(user), 2);
+      double itemVecNorm = arma::norm(parameters.col(item), 2);
+      smallRegTerm += 0.5 * (userVecNorm * userVecNorm +
+                             itemVecNorm * itemVecNorm);
+      bigRegTerm += 20 * (userVecNorm * userVecNorm +
+                          itemVecNorm * itemVecNorm);
+    }
+    
+    // Cost with regularization should be close to the sum of cost without
+    // regularization and the regularization terms.
+    BOOST_REQUIRE_CLOSE(rSVDFuncNoReg.Evaluate(parameters) + smallRegTerm,
+        rSVDFuncSmallReg.Evaluate(parameters), 1e-5);
+    BOOST_REQUIRE_CLOSE(rSVDFuncNoReg.Evaluate(parameters) + bigRegTerm,
+        rSVDFuncBigReg.Evaluate(parameters), 1e-5);
+  }
+}
+
+BOOST_AUTO_TEST_CASE(RegularizedSVDFunctionGradient)
+{
+  // Define useful constants.
+  const size_t numUsers = 50;
+  const size_t numItems = 50;
+  const size_t numRatings = 100;
+  const size_t maxRating = 5;
+  const size_t rank = 10;
+  
+  // Make a random rating dataset.
+  arma::mat data = arma::randu(3, numRatings);
+  data.row(0) = floor(data.row(0) * numUsers);
+  data.row(1) = floor(data.row(1) * numItems);
+  data.row(2) = floor(data.row(2) * maxRating + 0.5);
+  
+  arma::mat parameters = arma::randu(rank, numUsers + numItems);
+  
+  // Make two RegularizedSVDFunction objects, one with regularization and one
+  // without.
+  RegularizedSVDFunction rSVDFunc1(data, rank, 0);
+  RegularizedSVDFunction rSVDFunc2(data, rank, 0.5);
+  
+  // Calculate gradients for both the objects.
+  arma::mat gradient1, gradient2;
+  rSVDFunc1.Gradient(parameters, gradient1);
+  rSVDFunc2.Gradient(parameters, gradient2);
+  
+  // Perturbation constant.
+  const double epsilon = 0.0001;
+  double costPlus1, costMinus1, numGradient1;
+  double costPlus2, costMinus2, numGradient2;
+  
+  for(size_t i = 0; i < rank; i++)
+  {
+    for(size_t j = 0; j < numUsers + numItems; j++)
+    {
+      // Perturb parameter with a positive constant and get costs.
+      parameters(i, j) += epsilon;
+      costPlus1 = rSVDFunc1.Evaluate(parameters);
+      costPlus2 = rSVDFunc2.Evaluate(parameters);
+
+      // Perturb parameter with a negative constant and get costs.
+      parameters(i, j) -= 2 * epsilon;
+      costMinus1 = rSVDFunc1.Evaluate(parameters);
+      costMinus2 = rSVDFunc2.Evaluate(parameters);
+      
+      // Compute numerical gradients using the costs calculated above.
+      numGradient1 = (costPlus1 - costMinus1) / (2 * epsilon);
+      numGradient2 = (costPlus2 - costMinus2) / (2 * epsilon);
+      
+      // Restore the parameter value.
+      parameters(i, j) += epsilon;
+
+      // Compare numerical and analytical gradient values.
+      BOOST_REQUIRE_CLOSE(numGradient1, gradient1(i, j), 1e-2);
+      BOOST_REQUIRE_CLOSE(numGradient2, gradient2(i, j), 1e-2);
+    }
+  }
+}
+
+BOOST_AUTO_TEST_CASE(RegularizedSVDFunctionOptimize)
+{
+  // Define useful constants.
+  const size_t numUsers = 50;
+  const size_t numItems = 50;
+  const size_t numRatings = 100;
+  const size_t iterations = 10;
+  const size_t rank = 10;
+  const double alpha = 0.01;
+  const double lambda = 0.01;
+  
+  // Initialize random parameters.
+  arma::mat parameters = arma::randu(rank, numUsers + numItems);
+  
+  // Make a random rating dataset.
+  arma::mat data = arma::randu(3, numRatings);
+  data.row(0) = floor(data.row(0) * numUsers);
+  data.row(1) = floor(data.row(1) * numItems);
+  
+  // Make rating entries based on the parameters.
+  for(size_t i = 0; i < numRatings; i++)
+  {
+    data(2, i) = arma::dot(parameters.col(data(0, i)),
+                           parameters.col(numUsers + data(1, i)));
+  }
+  
+  // Make the Reg SVD function and the optimizer.
+  RegularizedSVDFunction rSVDFunc(data, rank, lambda);
+  mlpack::optimization::SGD<RegularizedSVDFunction> optimizer(rSVDFunc, 
+      alpha, iterations * numRatings);
+  
+  // Obtain optimized parameters after training.
+  arma::mat optParameters = arma::randu(rank, numUsers + numItems);
+  optimizer.Optimize(optParameters);
+  
+  // Get predicted ratings from optimized parameters.
+  arma::mat predictedData(1, numRatings);
+  for(size_t i = 0; i < numRatings; i++)
+  {
+    predictedData(0, i) = arma::dot(optParameters.col(data(0, i)),
+                                    optParameters.col(numUsers + data(1, i)));
+  }
+  
+  // Calculate the relative error of the predictions (normalized by the
+  // Frobenius norm of the full data matrix).
+  double relativeError = arma::norm(data.row(2) - predictedData, "frob") /
+                         arma::norm(data, "frob");
+  
+  // Relative error should be small.
+  BOOST_REQUIRE_SMALL(relativeError, 1e-2);
+}
+
+BOOST_AUTO_TEST_SUITE_END();
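
The RegularizedSVDFunctionGradient test above checks the analytic gradient
entry-by-entry against a central-difference approximation (with epsilon =
1e-4, as in the test):

    \partial f / \partial \theta_{ij} \approx
        ( f(\theta + \epsilon e_{ij}) - f(\theta - \epsilon e_{ij}) ) / (2 \epsilon)

where e_{ij} is the matrix that is 1 at entry (i, j) and 0 elsewhere. The
RegularizedSVDFunctionOptimize test then checks that parameters fitted by SGD
reproduce the synthetic ratings to within a small normalized (Frobenius) error.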


