[mlpack-svn] r10727 - mlpack/trunk/src/mlpack/methods/lars
fastlab-svn at coffeetalk-1.cc.gatech.edu
fastlab-svn at coffeetalk-1.cc.gatech.edu
Mon Dec 12 13:28:27 EST 2011
Author: niche
Date: 2011-12-12 13:28:26 -0500 (Mon, 12 Dec 2011)
New Revision: 10727
Modified:
mlpack/trunk/src/mlpack/methods/lars/lars.cpp
mlpack/trunk/src/mlpack/methods/lars/lars.hpp
mlpack/trunk/src/mlpack/methods/lars/lars_main.cpp
Log:
LARS no longer has X and y as class members. They are passed into the main computation function DoLARS
Modified: mlpack/trunk/src/mlpack/methods/lars/lars.cpp
===================================================================
--- mlpack/trunk/src/mlpack/methods/lars/lars.cpp 2011-12-12 18:22:48 UTC (rev 10726)
+++ mlpack/trunk/src/mlpack/methods/lars/lars.cpp 2011-12-12 18:28:26 UTC (rev 10727)
@@ -16,22 +16,14 @@
namespace mlpack {
namespace lars {
-LARS::LARS(const mat& matX,
- const vec& y,
- const bool useCholesky) :
- matX(matX),
- y(y),
+LARS::LARS(const bool useCholesky) :
useCholesky(useCholesky),
lasso(false),
elasticNet(false)
{ /* nothing left to do */ }
-LARS::LARS(const mat& matX,
- const vec& y,
- const bool useCholesky,
+LARS::LARS(const bool useCholesky,
const double lambda1) :
- matX(matX),
- y(y),
useCholesky(useCholesky),
lasso(true),
lambda1(lambda1),
@@ -39,13 +31,9 @@
lambda2(0)
{ /* nothing left to do */ }
-LARS::LARS(const mat& matX,
- const vec& y,
- const bool useCholesky,
+LARS::LARS(const bool useCholesky,
const double lambda1,
const double lambda2) :
- matX(matX),
- y(y),
useCholesky(useCholesky),
lasso(true),
lambda1(lambda1),
@@ -58,7 +46,7 @@
}
-void LARS::ComputeGram()
+void LARS::ComputeGram(const mat& matX)
{
if (elasticNet)
{
@@ -70,11 +58,6 @@
}
}
-void LARS::ComputeXty()
-{
- vecXTy = trans(matX) * y;
-}
-
const std::vector<u32> LARS::ActiveSet()
{
return activeSet;
@@ -95,13 +78,15 @@
return matUtriCholFactor;
}
-void LARS::DoLARS()
+void LARS::DoLARS(const mat& matX, const vec& y)
{
- // compute Gram matrix, XtY, and initialize active set varibles
- ComputeXty();
+ // compute Xty
+ vec vecXTy = trans(matX) * y;
+
+ // compute Gram matrix
if (!useCholesky && matGram.is_empty())
{
- ComputeGram();
+ ComputeGram(matX);
}
// set up active set variables
@@ -230,7 +215,7 @@
}
// compute "equiangular" direction in output space
- ComputeYHatDirection(betaDirection, yHatDirection);
+ ComputeYHatDirection(matX, betaDirection, yHatDirection);
double gamma = maxCorr / normalization;
@@ -367,8 +352,9 @@
activeSet.push_back(varInd);
}
-void LARS::ComputeYHatDirection(const vec& betaDirection,
- vec& yHatDirection)
+ void LARS::ComputeYHatDirection(const mat& matX,
+ const vec& betaDirection,
+ vec& yHatDirection)
{
yHatDirection.fill(0);
for(u32 i = 0; i < nActive; i++)
Modified: mlpack/trunk/src/mlpack/methods/lars/lars.hpp
===================================================================
--- mlpack/trunk/src/mlpack/methods/lars/lars.hpp 2011-12-12 18:22:48 UTC (rev 10726)
+++ mlpack/trunk/src/mlpack/methods/lars/lars.hpp 2011-12-12 18:28:26 UTC (rev 10727)
@@ -31,12 +31,19 @@
* If lambda_1 > 0, lambda_2 > 0, the problem is the Elastic Net.
* If lambda_1 = 0, lambda_2 > 0, the problem is Ridge Regression.
* If lambda_1 = 0, lambda_2 = 0, the problem is unregularized linear regression.
+ *
* Note: This algorithm is not recommended for use (in terms of efficiency)
* when lambda_1 = 0.
+ *
+ * Only minor modifications are necessary to handle the constrained version of
+ * the problem:
+ * min_beta ||X beta - y||_2^2 + 0.5 lambda_2 ||beta||_2^2
+ * subject to ||beta||_1 <= tau
+ * Although this option currently is not implemented, it will be implemented
+ * very soon.
*
* For more details, see the following papers:
*
- *
* @article{efron2004least,
* title={Least angle regression},
* author={Efron, B. and Hastie, T. and Johnstone, I. and Tibshirani, R.},
@@ -48,7 +55,6 @@
* publisher={Institute of Mathematical Statistics}
* }
*
- *
* @article{zou2005regularization,
* title={Regularization and variable selection via the elastic net},
* author={Zou, H. and Hastie, T.},
@@ -64,29 +70,21 @@
public:
- LARS(const arma::mat& matX,
- const arma::vec& y,
- const bool useCholesky);
+ LARS(const bool useCholesky);
- LARS(const arma::mat& matX,
- const arma::vec& y,
- const bool useCholesky,
+ LARS(const bool useCholesky,
const double lambda1);
- LARS(const arma::mat& matX,
- const arma::vec& y,
- const bool useCholesky,
+ LARS(const bool useCholesky,
const double lambda1,
const double lambda2);
~LARS() { }
void SetGram(const arma::mat& matGram);
-
- void ComputeGram();
-
- void ComputeXty();
-
+
+ void ComputeGram(const arma::mat& matX);
+
const std::vector<arma::u32> ActiveSet();
const std::vector<arma::vec> BetaPath();
@@ -95,16 +93,12 @@
const arma::mat MatUtriCholFactor();
- void DoLARS();
+ void DoLARS(const arma::mat& matX, const arma::vec& y);
void Solution(arma::vec& beta);
-
+
private:
- arma::mat matX;
- arma::vec y;
-
- arma::vec vecXTy;
arma::mat matGram;
// Upper triangular cholesky factor; initially 0x0 arma::matrix.
@@ -130,7 +124,8 @@
void Activate(arma::u32 varInd);
- void ComputeYHatDirection(const arma::vec& betaDirection,
+ void ComputeYHatDirection(const arma::mat& matX,
+ const arma::vec& betaDirection,
arma::vec& yHatDirection);
void InterpolateBeta();
Modified: mlpack/trunk/src/mlpack/methods/lars/lars_main.cpp
===================================================================
--- mlpack/trunk/src/mlpack/methods/lars/lars_main.cpp 2011-12-12 18:22:48 UTC (rev 10726)
+++ mlpack/trunk/src/mlpack/methods/lars/lars_main.cpp 2011-12-12 18:28:26 UTC (rev 10727)
@@ -49,8 +49,8 @@
y.load(yFilename, raw_ascii);
// do LARS
- LARS lars(matX, y, useCholesky, lambda1, lambda2);
- lars.DoLARS();
+ LARS lars(useCholesky, lambda1, lambda2);
+ lars.DoLARS(matX, y);
// get and save solution
vec beta;
More information about the mlpack-svn
mailing list