[mlpack-svn] r11776 - mlpack/trunk/src/mlpack/methods/lars

fastlab-svn at coffeetalk-1.cc.gatech.edu
Wed Mar 7 13:10:48 EST 2012


Author: niche
Date: 2012-03-07 13:10:47 -0500 (Wed, 07 Mar 2012)
New Revision: 11776

Modified:
   mlpack/trunk/src/mlpack/methods/lars/lars.cpp
   mlpack/trunk/src/mlpack/methods/lars/lars.hpp
Log:
When useCholesky is true, we now use a once-computed Gram matrix for inner product computations between points. If LARS is called repeatedly with the same data X (as in sparse coding), SetGram should be used even when useCholesky is true; in that case it is better still to use SetGramMem, which reuses existing memory rather than copying the matrix.
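
For illustration, a minimal repeated-use sketch (hedged: the namespace,
constructor arguments, and the DoLars call are assumptions about the
interface in lars.hpp at this revision; all names are illustrative):

    #include <mlpack/methods/lars/lars.hpp>

    using namespace arma;
    using namespace mlpack::regression;

    // Solve many LARS problems against the same data matX (as in sparse
    // coding), computing the Gram matrix only once.
    void SolveAll(const mat& matX, const mat& matY, double lambda1)
    {
      // Compute the Gram matrix up front.
      mat matGram = trans(matX) * matX;

      LARS lars(true /* useCholesky */, lambda1);

      // Hand LARS the precomputed Gram matrix; SetGramMem is meant to reuse
      // the caller's memory rather than copy it.
      lars.SetGramMem(matGram.memptr(), matGram.n_rows);

      for (u32 i = 0; i < matY.n_cols; i++)
        lars.DoLars(matX, matY.col(i));  // assumed solver entry point
    }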

Modified: mlpack/trunk/src/mlpack/methods/lars/lars.cpp
===================================================================
--- mlpack/trunk/src/mlpack/methods/lars/lars.cpp	2012-03-07 03:21:52 UTC (rev 11775)
+++ mlpack/trunk/src/mlpack/methods/lars/lars.cpp	2012-03-07 18:10:47 UTC (rev 11776)
@@ -44,6 +44,11 @@
   this->matGram = matGram;
 }
 
+void LARS::SetGramMem(double* matGramMemPtr, u32 nDims)
+{
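+  // Construct a matrix that uses the given auxiliary memory directly
+  // (copy_aux_mem = false) instead of allocating and copying.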
+  this->matGram = mat(matGramMemPtr, nDims, nDims, false);
+}
+
 void LARS::ComputeGram(const mat& matX)
 {
   if (elasticNet)
@@ -61,12 +66,6 @@
   // compute Xty
   vec vecXTy = trans(matX) * y;
   
-  // compute Gram matrix
-  if (!useCholesky && matGram.is_empty())
-  {
-    ComputeGram(matX);
-  }
-
   // set up active set variables
   nActive = 0;
   activeSet = std::vector<u32>(0);
@@ -101,6 +100,16 @@
     return;
   }
 
+  // compute Gram matrix
+  if (!useCholesky && matGram.is_empty())
+  {
+    ComputeGram(matX);
+  }
+  else if (useCholesky && matGram.is_empty())
+  {
+    // It is better to compute the Gram matrix ahead of time (via SetGram()
+    // or SetGramMem()) when in Cholesky mode.
+    matGram = trans(matX) * matX;
+  }
+
   //u32 iterations_run = 0;
   // MAIN LOOP
   while ((nActive < matX.n_cols) && (maxCorr > EPS))
@@ -129,13 +138,15 @@
       //printf("activating %d\n", changeInd);
       if (useCholesky)
       {
-        vec newGramCol = vec(nActive);
-        for (u32 i = 0; i < nActive; i++)
-        {
-          newGramCol[i] = dot(matX.col(activeSet[i]), matX.col(changeInd));
-        }
-
-        CholeskyInsert(matX.col(changeInd), newGramCol);
+        // Grab the new Gram column directly from the precomputed Gram
+        // matrix: element (activeSet[i], changeInd) has column-major linear
+        // index changeInd * matX.n_cols + activeSet[i] (matGram is
+        // matX.n_cols by matX.n_cols).  This is equivalent to computing
+        // dot(matX.col(activeSet[i]), matX.col(changeInd)) for each i.
+        vec newGramCol = matGram.elem(changeInd * matX.n_cols +
+            conv_to<uvec>::from(activeSet));
+
+        CholeskyInsert(matGram(changeInd, changeInd), newGramCol);
       }
 
       // add variable to active set
@@ -380,11 +391,11 @@
   else
   {
     vec newGramCol = trans(X) * newX;
-    CholeskyInsert(newX, newGramCol);
+    CholeskyInsert(dot(newX, newX), newGramCol);
   }
 }
 
-void LARS::CholeskyInsert(const vec& newX, const vec& newGramCol)
+void LARS::CholeskyInsert(double sqNormNewX, const vec& newGramCol)
 {
   int n = matUtriCholFactor.n_rows;
 
@@ -393,41 +404,36 @@
     matUtriCholFactor = mat(1, 1);
     if (elasticNet)
     {
-      matUtriCholFactor(0, 0) = sqrt(dot(newX, newX) + lambda2);
+      matUtriCholFactor(0, 0) = sqrt(sqNormNewX + lambda2);
     }
     else
     {
-      matUtriCholFactor(0, 0) = norm(newX, 2);
+      matUtriCholFactor(0, 0) = sqrt(sqNormNewX);
     }
   }
   else
   {
     mat matNewR = mat(n + 1, n + 1);
 
-    double sqNormNewX;
     if (elasticNet)
     {
-      sqNormNewX = dot(newX, newX) + lambda2;
+      sqNormNewX += lambda2;
     }
-    else
-    {
-      sqNormNewX = dot(newX, newX);
-    }
 
     vec matUtriCholFactork = solve(trimatl(trans(matUtriCholFactor)),
         newGramCol);
 
     matNewR(span(0, n - 1), span(0, n - 1)) = matUtriCholFactor;
     matNewR(span(0, n - 1), n) = matUtriCholFactork;
     matNewR(n, span(0, n - 1)).fill(0.0);
     matNewR(n, n) = sqrt(sqNormNewX - dot(matUtriCholFactork,
         matUtriCholFactork));
 
     matUtriCholFactor = matNewR;
   }
 }
 
-void LARS::GivensRotate(const vec& x, vec& rotatedX, mat& matG) 
+void LARS::GivensRotate(const vec::fixed<2>& x, vec::fixed<2>& rotatedX,
+    mat& matG)
 {
   if (x(1) == 0)
   {
@@ -469,7 +475,7 @@
     for(u32 k = colToKill; k < n; k++)
     {
       mat matG;
-      vec rotatedVec;
+      vec::fixed<2> rotatedVec;
       GivensRotate(matUtriCholFactor(span(k, k + 1), k), rotatedVec, matG);
       matUtriCholFactor(span(k, k + 1), k) = rotatedVec;
       if (k < n - 1)

Modified: mlpack/trunk/src/mlpack/methods/lars/lars.hpp
===================================================================
--- mlpack/trunk/src/mlpack/methods/lars/lars.hpp	2012-03-07 03:21:52 UTC (rev 11775)
+++ mlpack/trunk/src/mlpack/methods/lars/lars.hpp	2012-03-07 18:10:47 UTC (rev 11776)
@@ -114,6 +114,13 @@
   void SetGram(const arma::mat& matGram);
 
   /**
+   * Set the Gram matrix (done before calling DoLars), reusing the given
+   * memory rather than copying the matrix.
+   *
+   * @param matGramMemPtr Pointer to memory holding the precomputed Gram matrix
+   * @param nDims Dimensionality of the (nDims x nDims) Gram matrix
+   */
+  void SetGramMem(double* matGramMemPtr, arma::u32 nDims);
+
+  /**
    * Compute Gram matrix. If elastic net, add lambda2 * identity to diagonal.
    *
    * @param matX Data matrix to use for computing Gram matrix
@@ -192,9 +199,9 @@
 
   void CholeskyInsert(const arma::vec& newX, const arma::mat& X);
 
-  void CholeskyInsert(const arma::vec& newX, const arma::vec& newGramCol);
+  void CholeskyInsert(double sqNormNewX, const arma::vec& newGramCol);
 
-  void GivensRotate(const arma::vec& x, arma::vec& rotatedX, arma::mat& G);
+  void GivensRotate(const arma::vec::fixed<2>& x,
+                    arma::vec::fixed<2>& rotatedX, arma::mat& G);
 
   void CholeskyDelete(arma::u32 colToKill);
 

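For reference, the Gram-column gather introduced in lars.cpp above is
equivalent to the old dot-product loop because element (activeSet[i],
changeInd) of the Gram matrix has column-major linear index
changeInd * n_rows + activeSet[i]. A self-contained Armadillo sketch
checking that equivalence (illustrative names; not mlpack code):

    #include <armadillo>
    #include <cassert>
    #include <vector>

    using namespace arma;

    int main()
    {
      mat matX = randu<mat>(10, 25);     // 10 dimensions, 25 points
      mat matGram = trans(matX) * matX;  // 25 x 25 Gram matrix

      std::vector<u32> activeSet;
      activeSet.push_back(3);
      activeSet.push_back(7);
      activeSet.push_back(19);
      u32 changeInd = 11;

      // Loop form: inner products between each active column and the new one.
      vec loopCol(activeSet.size());
      for (u32 i = 0; i < activeSet.size(); i++)
        loopCol[i] = dot(matX.col(activeSet[i]), matX.col(changeInd));

      // Gather form: element (activeSet[i], changeInd) of matGram sits at
      // column-major linear index changeInd * matGram.n_rows + activeSet[i].
      uvec indices = changeInd * matGram.n_rows +
          conv_to<uvec>::from(activeSet);
      vec gatherCol = matGram.elem(indices);

      assert(norm(loopCol - gatherCol, 2) < 1e-12);
      return 0;
    }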