[mlpack-git] master: Style fixes. Remove constructor that takes a file. (cbeb3ea)

gitdub at big.cc.gt.atl.ga.us
Tue Sep 29 09:33:55 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/cbeb3ea17262b7c5115247dc217e316c529249b7...f85a9b22f3ce56143943a2488c05c2810d6b2bf3

>---------------------------------------------------------------

commit cbeb3ea17262b7c5115247dc217e316c529249b7
Author: Ryan Curtin <ryan at ratml.org>
Date:   Mon Sep 28 16:57:52 2015 -0400

    Style fixes.  Remove constructor that takes a file.


>---------------------------------------------------------------

cbeb3ea17262b7c5115247dc217e316c529249b7
 .../softmax_regression/softmax_regression.hpp      | 70 ++++++----------------
 .../softmax_regression_function.cpp                | 13 ++--
 .../softmax_regression_function.hpp                | 36 ++++-------
 .../softmax_regression/softmax_regression_impl.hpp | 58 ++++++++----------
 4 files changed, 60 insertions(+), 117 deletions(-)

diff --git a/src/mlpack/methods/softmax_regression/softmax_regression.hpp b/src/mlpack/methods/softmax_regression/softmax_regression.hpp
index 0c0d2b0..bc44c36 100644
--- a/src/mlpack/methods/softmax_regression/softmax_regression.hpp
+++ b/src/mlpack/methods/softmax_regression/softmax_regression.hpp
@@ -54,18 +54,16 @@ namespace regression {
  * regressor2.Predict(test_data, predictions2);
  * @endcode
  */
-
 template<
   template<typename> class OptimizerType = mlpack::optimization::L_BFGS
-  >
+>
 class SoftmaxRegression
 {
  public:
   /**
-   * Initialize the SoftmaxRegression without performing training.
-   * Default value of lambda is 0.0001.
-   * Be sure to use Train() before calling Predict() or ComputeAccuracy(),
-   * otherwise the results may be meaningless.
+   * Initialize the SoftmaxRegression without performing training.  Default
+   * value of lambda is 0.0001.  Be sure to use Train() before calling Predict()
+   * or ComputeAccuracy(), otherwise the results may be meaningless.
    *
    * @param inputSize Size of the input feature vector.
    * @param numClasses Number of classes for classification.
@@ -77,17 +75,6 @@ class SoftmaxRegression
 
   /**
    * Construct the SoftmaxRegression class with the provided data and labels.
-   * This will train the model.
-   *
-   * @param fileName name of the files saving the model contents
-   * @param name name of the structure to be save
-   * @exception If the file cannot be load, the exception will thrown
-   */
-  SoftmaxRegression(const std::string &fileName,
-                    const std::string& name);
-
-  /**
-   * Construct the SoftmaxRegression class with the provided data and labels.
    * This will train the model. Optionally, the parameter 'lambda' can be
    * passed, which controls the amount of L2-regularization in the objective
    * function. By default, the model takes a small value.
@@ -158,51 +145,30 @@ class SoftmaxRegression
                const size_t numClasses);
 
   //! Sets the size of the input vector.
-  size_t& InputSize() {
-    return inputSize;
-  }
+  size_t& InputSize() { return inputSize; }
   //! Gets the size of the input vector.
-  size_t InputSize() const {
-    return inputSize;
-  }
+  size_t InputSize() const { return inputSize; }
 
   //! Sets the number of classes.
-  size_t& NumClasses() {
-    return numClasses;
-  }
+  size_t& NumClasses() { return numClasses; }
   //! Gets the number of classes.
-  size_t NumClasses() const {
-    return numClasses;
-  }
+  size_t NumClasses() const { return numClasses; }
 
   //! Sets the regularization parameter.
-  double& Lambda() {
-    return lambda;
-  }
+  double& Lambda() { return lambda; }
   //! Gets the regularization parameter.
-  double Lambda() const {
-    return lambda;
-  }
+  double Lambda() const { return lambda; }
 
   //! Gets the intercept term flag.  We can't change this after training.
-  bool FitIntercept() const {
-    return fitIntercept;
-  }
+  bool FitIntercept() const { return fitIntercept; }
 
-  //! get the training parameters
-  arma::mat& Parameters()
-  {
-    return parameters;
-  }
-
-  //! get the training parameters
-  const arma::mat& Parameters() const
-  {
-    return parameters;
-  }
+  //! Get the model parameters.
+  arma::mat& Parameters() { return parameters; }
+  //! Get the model parameters.
+  const arma::mat& Parameters() const { return parameters; }
 
   /**
-   * Serialize the SparseAutoencoder
+   * Serialize the SoftmaxRegression model. 
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
@@ -229,8 +195,8 @@ class SoftmaxRegression
   bool fitIntercept;
 };
 
-}; // namespace regression
-}; // namespace mlpack
+} // namespace regression
+} // namespace mlpack
 
 // Include implementation.
 #include "softmax_regression_impl.hpp"
diff --git a/src/mlpack/methods/softmax_regression/softmax_regression_function.cpp b/src/mlpack/methods/softmax_regression/softmax_regression_function.cpp
index a63ccf6..e317831 100644
--- a/src/mlpack/methods/softmax_regression/softmax_regression_function.cpp
+++ b/src/mlpack/methods/softmax_regression/softmax_regression_function.cpp
@@ -15,11 +15,11 @@ SoftmaxRegressionFunction::SoftmaxRegressionFunction(const arma::mat& data,
                                                      const size_t numClasses,
                                                      const double lambda,
                                                      const bool fitIntercept) :
-  data(data),
-  inputSize(inputSize),
-  numClasses(numClasses),
-  lambda(lambda),
-  fitIntercept(fitIntercept)
+    data(data),
+    inputSize(inputSize),
+    numClasses(numClasses),
+    lambda(lambda),
+    fitIntercept(fitIntercept)
 {
   // Intialize the parameters to suitable values.
   initialPoint = InitializeWeights();
@@ -87,7 +87,8 @@ void SoftmaxRegressionFunction::GetGroundTruthMatrix(const arma::vec& labels,
  * it should consider the parameters.cols(0) intercept term.
  */
 void SoftmaxRegressionFunction::GetProbabilitiesMatrix(
-  const arma::mat& parameters, arma::mat& probabilities) const
+    const arma::mat& parameters, 
+    arma::mat& probabilities) const
 {
   arma::mat hypothesis;
 
diff --git a/src/mlpack/methods/softmax_regression/softmax_regression_function.hpp b/src/mlpack/methods/softmax_regression/softmax_regression_function.hpp
index 33ffdfb..d45d07a 100644
--- a/src/mlpack/methods/softmax_regression/softmax_regression_function.hpp
+++ b/src/mlpack/methods/softmax_regression/softmax_regression_function.hpp
@@ -80,41 +80,25 @@ class SoftmaxRegressionFunction
   void Gradient(const arma::mat& parameters, arma::mat& gradient) const;
 
   //! Return the initial point for the optimization.
-  const arma::mat& GetInitialPoint() const {
-    return initialPoint;
-  }
+  const arma::mat& GetInitialPoint() const { return initialPoint; }
 
   //! Sets the size of the input vector.
-  size_t& InputSize() {
-    return inputSize;
-  }
+  size_t& InputSize() { return inputSize; }
   //! Gets the size of the input vector.
-  size_t InputSize() const {
-    return inputSize;
-  }
+  size_t InputSize() const { return inputSize; }
 
   //! Sets the number of classes.
-  size_t& NumClasses() {
-    return numClasses;
-  }
+  size_t& NumClasses() { return numClasses; }
   //! Gets the number of classes.
-  size_t NumClasses() const {
-    return numClasses;
-  }
+  size_t NumClasses() const { return numClasses; }
 
   //! Sets the regularization parameter.
-  double& Lambda() {
-    return lambda;
-  }
+  double& Lambda() { return lambda; }
   //! Gets the regularization parameter.
-  double Lambda() const {
-    return lambda;
-  }
+  double Lambda() const { return lambda; }
 
   //! Gets the intercept flag.
-  bool FitIntercept() const {
-    return fitIntercept;
-  }
+  bool FitIntercept() const { return fitIntercept; }
 
  private:
   //! Training data matrix.
@@ -133,7 +117,7 @@ class SoftmaxRegressionFunction
   bool fitIntercept;
 };
 
-}; // namespace regression
-}; // namespace mlpack
+} // namespace regression
+} // namespace mlpack
 
 #endif
diff --git a/src/mlpack/methods/softmax_regression/softmax_regression_impl.hpp b/src/mlpack/methods/softmax_regression/softmax_regression_impl.hpp
index 2cf369f..913b6e1 100644
--- a/src/mlpack/methods/softmax_regression/softmax_regression_impl.hpp
+++ b/src/mlpack/methods/softmax_regression/softmax_regression_impl.hpp
@@ -18,10 +18,10 @@ SoftmaxRegression<OptimizerType>::
 SoftmaxRegression(const size_t inputSize,
                   const size_t numClasses,
                   const bool fitIntercept) :
-  inputSize{inputSize},
-  numClasses{numClasses},
-  lambda{0.0001},
-  fitIntercept{fitIntercept}
+    inputSize(inputSize),
+    numClasses(numClasses),
+    lambda(0.0001),
+    fitIntercept(fitIntercept)
 {
   SoftmaxRegressionFunction regressor(arma::mat(), 1,
                                       inputSize, numClasses,
@@ -30,24 +30,16 @@ SoftmaxRegression(const size_t inputSize,
 }
 
 template<template<typename> class OptimizerType>
-SoftmaxRegression<OptimizerType>::
-SoftmaxRegression(const std::string &fileName,
-                  const std::string& name)
-{
-  data::Load(fileName, name, *this, true);
-}
-
-template<template<typename> class OptimizerType>
 SoftmaxRegression<OptimizerType>::SoftmaxRegression(const arma::mat& data,
                                                     const arma::vec& labels,
                                                     const size_t inputSize,
                                                     const size_t numClasses,
                                                     const double lambda,
                                                     const bool fitIntercept) :
-  inputSize{inputSize},
-  numClasses{numClasses},
-  lambda{lambda},
-  fitIntercept{fitIntercept}
+    inputSize(inputSize),
+    numClasses(numClasses),
+    lambda(lambda),
+    fitIntercept(fitIntercept)
 {
   SoftmaxRegressionFunction regressor(data, labels, inputSize, numClasses,
                                       lambda, fitIntercept);
@@ -59,12 +51,12 @@ SoftmaxRegression<OptimizerType>::SoftmaxRegression(const arma::mat& data,
 
 template<template<typename> class OptimizerType>
 SoftmaxRegression<OptimizerType>::SoftmaxRegression(
-  OptimizerType<SoftmaxRegressionFunction>& optimizer) :
-  parameters(optimizer.Function().GetInitialPoint()),
-  inputSize{optimizer.Function().InputSize()},
-  numClasses{optimizer.Function().NumClasses()},
-  lambda{optimizer.Function().Lambda()},
-  fitIntercept{optimizer.Function().FitIntercept()}
+    OptimizerType<SoftmaxRegressionFunction>& optimizer) :
+    parameters(optimizer.Function().GetInitialPoint()),
+    inputSize(optimizer.Function().InputSize()),
+    numClasses(optimizer.Function().NumClasses()),
+    lambda(optimizer.Function().Lambda()),
+    fitIntercept(optimizer.Function().FitIntercept())
 {
   Train(optimizer);
 }
@@ -120,8 +112,8 @@ void SoftmaxRegression<OptimizerType>::Predict(const arma::mat& testData,
 
 template<template<typename> class OptimizerType>
 double SoftmaxRegression<OptimizerType>::ComputeAccuracy(
-  const arma::mat& testData,
-  const arma::vec& labels)
+    const arma::mat& testData,
+    const arma::vec& labels)
 {
   arma::vec predictions;
 
@@ -130,8 +122,8 @@ double SoftmaxRegression<OptimizerType>::ComputeAccuracy(
 
   // Increment count for every correctly predicted label.
   size_t count = 0;
-  for(size_t i = 0; i < predictions.n_elem; i++)
-    if(predictions(i) == labels(i))
+  for (size_t i = 0; i < predictions.n_elem; i++)
+    if (predictions(i) == labels(i))
       count++;
 
   // Return percentage accuracy.
@@ -139,8 +131,8 @@ double SoftmaxRegression<OptimizerType>::ComputeAccuracy(
 }
 
 template<template<typename> class OptimizerType>
-double SoftmaxRegression<OptimizerType>::
-Train(OptimizerType<SoftmaxRegressionFunction>& optimizer)
+double SoftmaxRegression<OptimizerType>::Train(
+    OptimizerType<SoftmaxRegressionFunction>& optimizer)
 {
   // Train the model.
   Timer::Start("softmax_regression_optimization");
@@ -154,9 +146,9 @@ Train(OptimizerType<SoftmaxRegressionFunction>& optimizer)
 }
 
 template<template<typename> class OptimizerType>
-double SoftmaxRegression<OptimizerType>::
-Train(const arma::mat &data, const arma::vec& labels,
-      const size_t numClasses)
+double SoftmaxRegression<OptimizerType>::Train(const arma::mat& data,
+                                               const arma::vec& labels,
+                                               const size_t numClasses)
 {
   SoftmaxRegressionFunction regressor(data, labels, data.n_rows, numClasses,
                                       lambda, fitIntercept);
@@ -165,7 +157,7 @@ Train(const arma::mat &data, const arma::vec& labels,
   return Train(optimizer);
 }
 
-}; // namespace regression
-}; // namespace mlpack
+} // namespace regression
+} // namespace mlpack
 
 #endif
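
With the file-based constructor gone, saving and restoring a model now goes through the new Serialize() method together with data::Load()/data::Save(). The following is a minimal sketch, not part of the commit, assuming the data::Save() overload for serializable models available on master and a hypothetical "model.xml" file name:

#include <mlpack/core.hpp>
#include <mlpack/methods/softmax_regression/softmax_regression.hpp>

using namespace mlpack;

int main()
{
  // Hypothetical training set: 10 dimensions, 500 points, 5 classes.
  arma::mat data = arma::randu<arma::mat>(10, 500);
  arma::vec labels(500);
  for (size_t i = 0; i < labels.n_elem; ++i)
    labels(i) = i % 5; // Arbitrary labels in {0, ..., 4}.

  // Train a model with the data-and-labels constructor.
  regression::SoftmaxRegression<> sr(data, labels, data.n_rows, 5);

  // Save the trained model; the structure name plays the same role as the
  // 'name' argument of the removed constructor.
  data::Save("model.xml", "softmax_regression_model", sr, true);

  // Restore into an untrained model of the same shape, then predict.
  regression::SoftmaxRegression<> sr2(data.n_rows, 5);
  data::Load("model.xml", "softmax_regression_model", sr2, true);

  arma::vec predictions;
  sr2.Predict(data, predictions);

  return 0;
}

The same Serialize() routine also drives whichever archive formats data::Save() supports, so the model class itself no longer needs any file-handling code.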


