[mlpack-git] master: Comparison-type warning sorted out. (eb787c1)

gitdub at big.cc.gt.atl.ga.us gitdub at big.cc.gt.atl.ga.us
Thu Mar 5 22:00:17 EST 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/904762495c039e345beba14c1142fd719b3bd50e...f94823c800ad6f7266995c700b1b630d5ffdcf40

>---------------------------------------------------------------

commit eb787c1d37c6d79759f5b62c5f5766412b8b0cc8
Author: Udit Saxena <saxena.udit at gmail.com>
Date:   Mon Sep 15 10:32:25 2014 +0000

    Comparison-type warning sorted out.


>---------------------------------------------------------------

eb787c1d37c6d79759f5b62c5f5766412b8b0cc8
 src/mlpack/methods/adaboost/adaboost_impl.hpp          | 18 +++++++++---------
 .../methods/decision_stump/decision_stump_impl.hpp     | 14 +++++++-------
 2 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/src/mlpack/methods/adaboost/adaboost_impl.hpp b/src/mlpack/methods/adaboost/adaboost_impl.hpp
index c884025..695ab9e 100644
--- a/src/mlpack/methods/adaboost/adaboost_impl.hpp
+++ b/src/mlpack/methods/adaboost/adaboost_impl.hpp
@@ -101,7 +101,7 @@ AdaBoost<MatType, WeakLearner>::AdaBoost(
 
     // Now, start calculation of alpha(t) using ht
 
-    for (int j = 0;j < D.n_rows; j++) // instead of D, ht
+    for (size_t j = 0;j < D.n_rows; j++) // instead of D, ht
     {
       if (predictedLabels(j) == labels(j))
       {
@@ -135,12 +135,12 @@ AdaBoost<MatType, WeakLearner>::AdaBoost(
     wl.push_back(w);
 
     // now start modifying weights
-    for (int j = 0;j < D.n_rows; j++)
+    for (size_t j = 0;j < D.n_rows; j++)
     {
       double expo = exp(alphat);
       if (predictedLabels(j) == labels(j))
       {
-          for (int k = 0;k < D.n_cols; k++)
+          for (size_t k = 0;k < D.n_cols; k++)
           {
             // we calculate zt, the normalization constant
             zt += D(j,k) / expo; // * exp(-1 * alphat * yt(j,k) * ht(j,k));
@@ -156,7 +156,7 @@ AdaBoost<MatType, WeakLearner>::AdaBoost(
       }
       else
       {
-        for (int k = 0;k < D.n_cols; k++)
+        for (size_t k = 0;k < D.n_cols; k++)
           {
             // we calculate zt, the normalization constant
             zt += D(j,k) * expo;
@@ -185,7 +185,7 @@ AdaBoost<MatType, WeakLearner>::AdaBoost(
   arma::uword max_index;
   arma::mat sfh = sumFinalH.t();
 
-  for (int i = 0;i < sfh.n_cols; i++)
+  for (size_t i = 0;i < sfh.n_cols; i++)
   {
     tempSumFinalH = sfh.col(i);
     tempSumFinalH.max(max_index);
@@ -211,18 +211,18 @@ void AdaBoost<MatType, WeakLearner>::Classify(
   cMatrix.zeros();
   predictedLabels.zeros();
 
-  for (int i = 0;i < wl.size(); i++)
+  for (size_t i = 0;i < wl.size(); i++)
   {
     wl[i].Classify(test, tempPredictedLabels);
 
-    for (int j = 0; j < tempPredictedLabels.n_cols; j++)
+    for (size_t j = 0; j < tempPredictedLabels.n_cols; j++)
       cMatrix(tempPredictedLabels(j), j) += (alpha[i] * tempPredictedLabels(j));
   }
   
   arma::colvec cMRow;
   arma::uword max_index;
 
-  for (int i = 0; i < predictedLabels.n_cols; i++)
+  for (size_t i = 0; i < predictedLabels.n_cols; i++)
   {
     cMRow = cMatrix.col(i);
     cMRow.max(max_index);
@@ -244,7 +244,7 @@ void AdaBoost<MatType, WeakLearner>::BuildWeightMatrix(
     const arma::mat& D,
     arma::rowvec& weights)
 {
-  int i, j;
+  size_t i, j;
   weights.fill(0.0);
 
   for (i = 0; i < D.n_rows; i++)
diff --git a/src/mlpack/methods/decision_stump/decision_stump_impl.hpp b/src/mlpack/methods/decision_stump/decision_stump_impl.hpp
index 24af7e1..5775653 100644
--- a/src/mlpack/methods/decision_stump/decision_stump_impl.hpp
+++ b/src/mlpack/methods/decision_stump/decision_stump_impl.hpp
@@ -55,7 +55,7 @@ void DecisionStump<MatType>::Train(const MatType& data, const arma::Row<size_t>&
       labels.subvec(0, labels.n_elem - 1), 0, weightD);
 
   double gain, bestGain = 0.0;
-  for (int i = 0; i < data.n_rows; i++)
+  for (size_t i = 0; i < data.n_rows; i++)
   {
     // Go through each attribute of the data.
     if (IsDistinct<double>(data.row(i)))
@@ -96,12 +96,12 @@ template<typename MatType>
 void DecisionStump<MatType>::Classify(const MatType& test,
                                       arma::Row<size_t>& predictedLabels)
 {
-  for (int i = 0; i < test.n_cols; i++)
+  for (size_t i = 0; i < test.n_cols; i++)
   {
     // Determine which bin the test point falls into.
     // Assume first that it falls into the first bin, then proceed through the
     // bins until it is known which bin it falls into.
-    int bin = 0;
+    size_t bin = 0;
     const double val = test(splitAttribute, i);
 
     while (bin < split.n_elem - 1)
@@ -160,7 +160,7 @@ double DecisionStump<MatType>::SetupSplitAttribute(
     const arma::Row<size_t>& labels,
     const arma::rowvec& weightD)
 {
-  int i, count, begin, end;
+  size_t i, count, begin, end;
   double entropy = 0.0;
 
   // Sort the attribute in order to calculate splitting ranges.
@@ -252,7 +252,7 @@ template <typename rType>
 void DecisionStump<MatType>::TrainOnAtt(const arma::rowvec& attribute,
                                         const arma::Row<size_t>& labels)
 {
-  int i, count, begin, end;
+  size_t i, count, begin, end;
 
   arma::rowvec sortedSplitAtt = arma::sort(attribute);
   arma::uvec sortedSplitIndexAtt = arma::stable_sort_index(attribute.t());
@@ -339,7 +339,7 @@ void DecisionStump<MatType>::TrainOnAtt(const arma::rowvec& attribute,
 template <typename MatType>
 void DecisionStump<MatType>::MergeRanges()
 {
-  for (int i = 1; i < split.n_rows; i++)
+  for (size_t i = 1; i < split.n_rows; i++)
   {
     if (binLabels(i) == binLabels(i - 1))
     {
@@ -365,7 +365,7 @@ rType DecisionStump<MatType>::CountMostFreq(const arma::Row<rType>& subCols)
     return sortCounts[0];
 
   // An O(n) loop which counts the most frequent element in sortCounts
-  for (int i = 0; i < sortCounts.n_elem; ++i)
+  for (size_t i = 0; i < sortCounts.n_elem; ++i)
   {
     if (i == sortCounts.n_elem - 1)
     {



More information about the mlpack-git mailing list