[mlpack-git] master: Add some const and fix some formatting. (b366a19)

gitdub at big.cc.gt.atl.ga.us
Thu Mar 5 21:57:14 EST 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/904762495c039e345beba14c1142fd719b3bd50e...f94823c800ad6f7266995c700b1b630d5ffdcf40

>---------------------------------------------------------------

commit b366a19cb0549e5dfecbfc217d4be17ab8b701ef
Author: Ryan Curtin <ryan at ratml.org>
Date:   Thu Aug 7 15:14:05 2014 +0000

    Add some const and fix some formatting.


>---------------------------------------------------------------

b366a19cb0549e5dfecbfc217d4be17ab8b701ef
 src/mlpack/methods/adaboost/adaboost.hpp      | 25 ++++++++++++++-----------
 src/mlpack/methods/adaboost/adaboost_impl.hpp | 14 +++++++++-----
 2 files changed, 23 insertions(+), 16 deletions(-)

diff --git a/src/mlpack/methods/adaboost/adaboost.hpp b/src/mlpack/methods/adaboost/adaboost.hpp
index 838a61c..4f0ef30 100644
--- a/src/mlpack/methods/adaboost/adaboost.hpp
+++ b/src/mlpack/methods/adaboost/adaboost.hpp
@@ -30,22 +30,25 @@
 namespace mlpack {
 namespace adaboost {
 
-template <typename MatType = arma::mat, typename WeakLearner =
-          mlpack::perceptron::Perceptron<> >
+template<typename MatType = arma::mat,
+         typename WeakLearner = mlpack::perceptron::Perceptron<> >
 class Adaboost
 {
-public:
+ public:
   /**
-   *  Constructor. Currently runs the Adaboost.mh algorithm
+   * Constructor. Currently runs the Adaboost.mh algorithm.
    *
-   *  @param data Input data
-   *  @param labels Corresponding labels
-   *  @param iterations Number of boosting rounds
-   *  @param tol The tolerance for change in values of rt.
-   *  @param other Weak Learner, which has been initialized already
+   * @param data Input data.
+   * @param labels Corresponding labels.
+   * @param iterations Number of boosting rounds.
+   * @param tol The tolerance for change in values of rt.
+   * @param other Weak Learner, which has been initialized already.
    */
-  Adaboost(const MatType& data, const arma::Row<size_t>& labels,
-           int iterations, double tol, const WeakLearner& other);
+  Adaboost(const MatType& data,
+           const arma::Row<size_t>& labels,
+           const int iterations,
+           const double tol,
+           const WeakLearner& other);
 
   /**
    *  This function helps in building a classification Matrix which is of
diff --git a/src/mlpack/methods/adaboost/adaboost_impl.hpp b/src/mlpack/methods/adaboost/adaboost_impl.hpp
index 8c51441..139b9d8 100644
--- a/src/mlpack/methods/adaboost/adaboost_impl.hpp
+++ b/src/mlpack/methods/adaboost/adaboost_impl.hpp
@@ -25,6 +25,7 @@
 
 namespace mlpack {
 namespace adaboost {
+
 /**
  *  Constructor. Currently runs the Adaboost.mh algorithm
  *
@@ -34,12 +35,15 @@ namespace adaboost {
  *  @param other Weak Learner, which has been initialized already
  */
 template<typename MatType, typename WeakLearner>
-Adaboost<MatType, WeakLearner>::Adaboost(const MatType& data,
-        const arma::Row<size_t>& labels, int iterations, double tol,
-        const WeakLearner& other)
+Adaboost<MatType, WeakLearner>::Adaboost(
+    const MatType& data,
+    const arma::Row<size_t>& labels,
+    const int iterations,
+    const double tol,
+    const WeakLearner& other)
 {
-  // Counting the number of classes into numClasses.
-  size_t numClasses = (arma::max(labels) - arma::min(labels)) + 1;
+  // Count the number of classes.
+  const size_t numClasses = (arma::max(labels) - arma::min(labels)) + 1;
   tolerance = tol;
   int i, j, k;
   double rt, crt, alphat = 0.0, zt;
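
For context, here is a minimal usage sketch of the constructor as it reads after this commit. The dataset, label values, and perceptron setup below are hypothetical illustrations, not taken from the repository, and the Perceptron constructor arguments are an assumption; only the Adaboost parameter order (data, labels, iterations, tol, other) comes from the diff above.

#include <mlpack/core.hpp>
#include <mlpack/methods/adaboost/adaboost.hpp>

using namespace mlpack;

int main()
{
  // Hypothetical training set: 100 four-dimensional points, two classes.
  arma::mat data = arma::randu<arma::mat>(4, 100);
  arma::Row<size_t> labels(100);
  labels.subvec(0, 49).fill(0);
  labels.subvec(50, 99).fill(1);

  // The weak learner must already be initialized before it is handed to
  // Adaboost (Perceptron constructor arguments are an assumption here).
  perceptron::Perceptron<> p(data, labels, 400);

  // After this commit the signature is:
  //   Adaboost(data, labels, iterations, tol, other).
  adaboost::Adaboost<> a(data, labels, 20, 1e-10, p);

  return 0;
}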


