[mlpack-svn] r16797 - mlpack/trunk/src/mlpack/methods/decision_stump

fastlab-svn at coffeetalk-1.cc.gatech.edu
Wed Jul 9 15:25:51 EDT 2014


Author: rcurtin
Date: Wed Jul  9 15:25:51 2014
New Revision: 16797

Log:
Const-correctness and 80-character lines... very trivial fix, no functionality
change.


Modified:
   mlpack/trunk/src/mlpack/methods/decision_stump/decision_stump_impl.hpp

Modified: mlpack/trunk/src/mlpack/methods/decision_stump/decision_stump_impl.hpp
==============================================================================
--- mlpack/trunk/src/mlpack/methods/decision_stump/decision_stump_impl.hpp	(original)
+++ mlpack/trunk/src/mlpack/methods/decision_stump/decision_stump_impl.hpp	Wed Jul  9 15:25:51 2014
@@ -37,9 +37,9 @@
   // If classLabels are not all identical, proceed with training.
   int bestAtt = 0;
   double entropy;
-  double rootEntropy = CalculateEntropy<size_t>(labels.subvec(0,labels.n_elem-1));
-  // std::cout<<"rootEntropy is: "<<rootEntropy<<"\n";
-  // double bestEntropy = DBL_MAX;
+  const double rootEntropy = CalculateEntropy<size_t>(
+      labels.subvec(0, labels.n_elem - 1));
+
   double gain, bestGain = 0.0;
   for (int i = 0; i < data.n_rows; i++)
   {
@@ -56,12 +56,11 @@
       // maximized.
 
       // if (entropy < bestEntropy)
-      // Instead of the above rule, we are maximizing gain, which was 
+      // Instead of the above rule, we are maximizing gain, which was
       // what is returned from SetupSplitAttribute.
       if (gain < bestGain)
       {
         bestAtt = i;
-        // bestEntropy = entropy;
         bestGain = gain;
       }
     }
@@ -372,8 +371,8 @@
 {
   double entropy = 0.0;
   size_t j;
-  
-  arma::Row<size_t> numElem(numClass); 
+
+  arma::Row<size_t> numElem(numClass);
   numElem.fill(0);
 
   // Populate numElem; they are used as helpers to calculate
@@ -384,10 +383,10 @@
   for (j = 0; j < numClass; j++)
   {
     const double p1 = ((double) numElem(j) / labels.n_elem);
-  
+
     entropy += (p1 == 0) ? 0 : p1 * log2(p1);
   }
-  
+
   return entropy;
 }
 
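For anyone reading the last hunk in isolation: the quantity accumulated there is the signed sum of p * log2(p) over the class labels, i.e. the negative of the usual Shannon entropy, which is why the returned value is always <= 0. Below is a minimal standalone sketch of that counting/accumulation pattern, assuming Armadillo is available; the function name LabelEntropyTerm and the main() driver are illustrative only and are not mlpack's API.

#include <armadillo>
#include <cmath>
#include <iostream>

// Sketch of the per-class counting and p * log2(p) accumulation shown in
// the final hunk of the diff above.  Illustrative names, not mlpack's API.
double LabelEntropyTerm(const arma::Row<size_t>& labels, const size_t numClass)
{
  // Count how many labels fall into each class.
  arma::Row<size_t> numElem(numClass);
  numElem.fill(0);
  for (size_t i = 0; i < labels.n_elem; ++i)
    numElem(labels(i))++;

  // Accumulate p * log2(p), skipping empty classes to avoid log2(0).
  double entropy = 0.0;
  for (size_t j = 0; j < numClass; ++j)
  {
    const double p1 = ((double) numElem(j) / labels.n_elem);
    entropy += (p1 == 0) ? 0 : p1 * std::log2(p1);
  }

  // This is sum_j p_j * log2(p_j), i.e. the negative of the Shannon entropy.
  return entropy;
}

int main()
{
  const arma::Row<size_t> labels = { 0, 0, 1, 1 };
  // Two equally likely classes: 2 * (0.5 * log2(0.5)) = -1.
  std::cout << LabelEntropyTerm(labels, 2) << std::endl;
  return 0;
}

With two equally likely classes the sketch prints -1, matching 2 * (0.5 * log2 0.5).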


