[mlpack-svn] r13002 - mlpack/trunk/src/mlpack/methods/lars

fastlab-svn at coffeetalk-1.cc.gatech.edu
Sun Jun 10 17:34:55 EDT 2012


Author: rcurtin
Date: 2012-06-10 17:34:55 -0400 (Sun, 10 Jun 2012)
New Revision: 13002

Modified:
   mlpack/trunk/src/mlpack/methods/lars/lars.cpp
   mlpack/trunk/src/mlpack/methods/lars/lars.hpp
Log:
Change DoLARS() to the more descriptive and less awkward Regress().  Minor code
cleanups, and turn comments into full sentences (grammar Nazi mode activated).


Modified: mlpack/trunk/src/mlpack/methods/lars/lars.cpp
===================================================================
--- mlpack/trunk/src/mlpack/methods/lars/lars.cpp	2012-06-10 21:27:36 UTC (rev 13001)
+++ mlpack/trunk/src/mlpack/methods/lars/lars.cpp	2012-06-10 21:34:55 UTC (rev 13002)
@@ -36,10 +36,10 @@
     tolerance(tolerance)
 { /* Nothing left to do */ }
 
-void LARS::DoLARS(const arma::mat& matX,
-                  const arma::vec& y,
-                  arma::vec& beta,
-                  const bool rowMajor)
+void LARS::Regress(const arma::mat& matX,
+                   const arma::vec& y,
+                   arma::vec& beta,
+                   const bool rowMajor)
 {
   // This matrix may end up holding the transpose -- if necessary.
   arma::mat dataTrans;
@@ -112,8 +112,6 @@
 
     if (!lassocond)
     {
-      // index is absolute index
-      //printf("activating %d\n", changeInd);
       if (useCholesky)
       {
         // vec newGramCol = vec(activeSet.size());
@@ -125,22 +123,21 @@
         arma::vec newGramCol = matGram.elem(changeInd * dataRef.n_cols +
             arma::conv_to<arma::uvec>::from(activeSet));
 
-        //CholeskyInsert(matX.col(changeInd), newGramCol);
         CholeskyInsert(matGram(changeInd, changeInd), newGramCol);
       }
 
-      // add variable to active set
+      // Add variable to active set.
       Activate(changeInd);
     }
 
-    // compute signs of correlations
+    // Compute signs of correlations.
     arma::vec s = arma::vec(activeSet.size());
     for (size_t i = 0; i < activeSet.size(); i++)
       s(i) = corr(activeSet[i]) / fabs(corr(activeSet[i]));
 
-    // compute "equiangular" direction in parameter space (betaDirection)
-    /* We use quotes because in the case of non-unit norm variables,
-       this need not be equiangular. */
+    // Compute the "equiangular" direction in parameter space (betaDirection).
+    // We use quotes because in the case of non-unit norm variables, this need
+    // not be equiangular.
     arma::vec unnormalizedBetaDirection;
     double normalization;
     arma::vec betaDirection;
@@ -182,10 +179,10 @@
 
     double gamma = maxCorr / normalization;
 
-    // if not all variables are active
+    // If not all variables are active.
     if (activeSet.size() < dataRef.n_cols)
     {
-      // compute correlations with direction
+      // Compute correlations with direction.
       for (size_t ind = 0; ind < dataRef.n_cols; ind++)
       {
         if (isActive[ind])
@@ -201,7 +198,7 @@
       }
     }
 
-    // bound gamma according to LASSO
+    // Bound gamma according to LASSO.
     if (lasso)
     {
       lassocond = false;
@@ -220,43 +217,33 @@
 
       if (lassoboundOnGamma < gamma)
       {
-        // printf("%d: gap = %e\tbeta(%d) = %e\n",
-        //    activeSet[activeIndToKickOut],
-        //    gamma - lassoboundOnGamma,
-        //    activeSet[activeIndToKickOut],
-        //    beta(activeSet[activeIndToKickOut]));
         gamma = lassoboundOnGamma;
         lassocond = true;
         changeInd = activeIndToKickOut;
       }
     }
 
-    // update prediction
+    // Update the prediction.
     yHat += gamma * yHatDirection;
 
-    // update estimator
+    // Update the estimator.
     for (size_t i = 0; i < activeSet.size(); i++)
     {
       beta(activeSet[i]) += gamma * betaDirection(i);
     }
 
-    // sanity check to make sure the kicked out guy (or girl?) is actually zero
+    // Sanity check to make sure the kicked-out dimension is actually zero.
     if (lassocond)
     {
       if (beta(activeSet[changeInd]) != 0)
-      {
-        //printf("fixed from %e to 0\n", beta(activeSet[changeInd]));
         beta(activeSet[changeInd]) = 0;
-      }
     }
 
     betaPath.push_back(beta);
 
     if (lassocond)
     {
-      // index is in position changeInd in activeSet
-      //printf("\t\tKICK OUT %d!\n", activeSet[changeInd]);
-
+      // Index is in position changeInd in activeSet.
       if (useCholesky)
         CholeskyDelete(changeInd);
 

Modified: mlpack/trunk/src/mlpack/methods/lars/lars.hpp
===================================================================
--- mlpack/trunk/src/mlpack/methods/lars/lars.hpp	2012-06-10 21:27:36 UTC (rev 13001)
+++ mlpack/trunk/src/mlpack/methods/lars/lars.hpp	2012-06-10 21:34:55 UTC (rev 13002)
@@ -34,13 +34,15 @@
  * regression (Elastic Net).
  *
  * Let \f$ X \f$ be a matrix where each row is a point and each column is a
- * dimension and let \f$ y \f$ be a vector of targets.
+ * dimension and let \f$ y \f$ be a vector of responses.
  *
  * The Elastic Net problem is to solve
  *
  * \f[ \min_{\beta} 0.5 || X \beta - y ||_2^2 + \lambda_1 || \beta ||_1 +
  *     0.5 \lambda_2 || \beta ||_2^2 \f]
  *
+ * where \f$ \beta \f$ is the vector of regression coefficients.
+ *
  * If \f$ \lambda_1 > 0 \f$ and \f$ \lambda_2 = 0 \f$, the problem is the LASSO.
  * If \f$ \lambda_1 > 0 \f$ and \f$ \lambda_2 > 0 \f$, the problem is the
  *   elastic net.
@@ -124,29 +126,28 @@
    * necessary (i.e., you want to pass in a row-major matrix), pass 'true' for
    * the rowMajor parameter.
    *
-   * @param matX Column-major input data (or row-major input data if rowMajor =
+   * @param data Column-major input data (or row-major input data if rowMajor =
    *     true).
-   * @param y A vector of targets.
-   * @param beta Vector to store the solution in.
+   * @param responses A vector of targets.
+   * @param beta Vector to store the solution (the coefficients) in.
    * @param rowMajor Set to true if matX is row-major.
    */
-  void DoLARS(const arma::mat& matX,
-              const arma::vec& y,
-              arma::vec& beta,
-              const bool rowMajor = false);
+  void Regress(const arma::mat& data,
+               const arma::vec& responses,
+               arma::vec& beta,
+               const bool rowMajor = false);
 
-  //! Accessor for activeSet.
+  //! Access the set of active dimensions.
   const std::vector<size_t>& ActiveSet() const { return activeSet; }
 
-  //! Accessor for betaPath.
+  //! Access the set of coefficients after each iteration; the solution is the
+  //! last element.
   const std::vector<arma::vec>& BetaPath() const { return betaPath; }
 
-  //! Accessor for lambdaPath.
+  //! Access the set of values for lambda1 after each iteration; the value
+  //! corresponding to the final solution is the last element.
   const std::vector<double>& LambdaPath() const { return lambdaPath; }
 
-  //! Accessor for matUtriCholFactor.
-  const arma::mat& MatUtriCholFactor() const { return matUtriCholFactor; }
-
 private:
   //! Gram matrix.
   arma::mat matGramInternal;
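
For anyone updating call sites against this change, here is a minimal usage
sketch of the renamed method.  The constructor arguments and the
mlpack::regression namespace are assumptions based on the surrounding code,
not guaranteed by this commit; the data layout follows the documentation
above (column-major by default, so each column is a point):

  #include <iostream>
  #include <mlpack/methods/lars/lars.hpp>

  using namespace mlpack::regression;

  int main()
  {
    // Column-major input, as the default rowMajor = false expects: each
    // column is a point (here, 10 dimensions x 100 points).  A matrix with
    // points as rows can be passed instead with rowMajor = true.
    arma::mat data = arma::randu<arma::mat>(10, 100);
    arma::vec responses = arma::randu<arma::vec>(100);

    // Assumed constructor: LARS(useCholesky, lambda1, lambda2).  With
    // lambda1 > 0 and lambda2 = 0, this solves the LASSO.
    LARS lars(false, 0.1, 0.0);

    arma::vec beta;
    lars.Regress(data, responses, beta); // Previously lars.DoLARS(...).

    // Coefficients after each iteration; the solution is the last element.
    std::cout << "Path length: " << lars.BetaPath().size() << std::endl;

    return 0;
  }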



