[mlpack-git] master: Refactor AdaBoost constructor to allow default parameters. (293241e)
gitdub at big.cc.gt.atl.ga.us
Mon Nov 30 17:24:36 EST 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/10b9d45b806a3e879b0564d78ccb183ebc7051ba...31c557d9cc7e4da57fd8a246085c19e076d12271
>---------------------------------------------------------------
commit 293241e6a67b40ccbbf4896a93efb46138bad1b7
Author: Ryan Curtin <ryan at ratml.org>
Date: Mon Nov 30 21:32:54 2015 +0000
Refactor AdaBoost constructor to allow default parameters.
>---------------------------------------------------------------
293241e6a67b40ccbbf4896a93efb46138bad1b7
src/mlpack/methods/adaboost/adaboost.hpp | 6 +-
src/mlpack/methods/adaboost/adaboost_impl.hpp | 8 +-
src/mlpack/methods/adaboost/adaboost_main.cpp | 6 +-
src/mlpack/tests/adaboost_test.cpp | 131 +++++++++++++-------------
4 files changed, 73 insertions(+), 78 deletions(-)
diff --git a/src/mlpack/methods/adaboost/adaboost.hpp b/src/mlpack/methods/adaboost/adaboost.hpp
index 320982d..924e2e2 100644
--- a/src/mlpack/methods/adaboost/adaboost.hpp
+++ b/src/mlpack/methods/adaboost/adaboost.hpp
@@ -90,9 +90,9 @@ class AdaBoost
*/
AdaBoost(const MatType& data,
const arma::Row<size_t>& labels,
- const int iterations,
- const double tol,
- const WeakLearner& other);
+ const WeakLearner& other,
+ const size_t iterations = 100,
+ const double tolerance = 1e-6);
// Stores the final classification of the labels.
arma::Row<size_t> finalHypothesis;
diff --git a/src/mlpack/methods/adaboost/adaboost_impl.hpp b/src/mlpack/methods/adaboost/adaboost_impl.hpp
index 367c975..3fc8a21 100644
--- a/src/mlpack/methods/adaboost/adaboost_impl.hpp
+++ b/src/mlpack/methods/adaboost/adaboost_impl.hpp
@@ -39,9 +39,9 @@ template<typename MatType, typename WeakLearner>
AdaBoost<MatType, WeakLearner>::AdaBoost(
const MatType& data,
const arma::Row<size_t>& labels,
- const int iterations,
- const double tol,
- const WeakLearner& other)
+ const WeakLearner& other,
+ const size_t iterations,
+ const double tol)
{
// Count the number of classes.
classes = (arma::max(labels) - arma::min(labels)) + 1;
@@ -75,7 +75,7 @@ AdaBoost<MatType, WeakLearner>::AdaBoost(
arma::Row<size_t> finalH(predictedLabels.n_cols);
// Now, start the boosting rounds.
- for (int i = 0; i < iterations; i++)
+ for (size_t i = 0; i < iterations; i++)
{
// Initialized to zero in every round. rt is used for calculation of
// alphat; it is the weighted error.
diff --git a/src/mlpack/methods/adaboost/adaboost_main.cpp b/src/mlpack/methods/adaboost/adaboost_main.cpp
index a439563..f456d84 100644
--- a/src/mlpack/methods/adaboost/adaboost_main.cpp
+++ b/src/mlpack/methods/adaboost/adaboost_main.cpp
@@ -119,16 +119,16 @@ int main(int argc, char *argv[])
Log::Fatal << "Test data dimensionality (" << testingData.n_rows << ") "
<< "must be the same as training data (" << trainingData.n_rows - 1
<< ")!" << std::endl;
- int iterations = CLI::GetParam<int>("iterations");
+ size_t iterations = (size_t) CLI::GetParam<int>("iterations");
// define your own weak learner, perceptron in this case.
// defining the number of iterations of the perceptron.
- int iter = 400;
+ size_t iter = 400;
perceptron::Perceptron<> p(trainingData, labels.t(), max(labels) + 1, iter);
Timer::Start("Training");
- AdaBoost<> a(trainingData, labels.t(), iterations, tolerance, p);
+ AdaBoost<> a(trainingData, labels.t(), p, iterations, tolerance);
Timer::Stop("Training");
Row<size_t> predictedLabels(testingData.n_cols);
diff --git a/src/mlpack/tests/adaboost_test.cpp b/src/mlpack/tests/adaboost_test.cpp
index de6c582..fa3999c 100644
--- a/src/mlpack/tests/adaboost_test.cpp
+++ b/src/mlpack/tests/adaboost_test.cpp
@@ -40,9 +40,9 @@ BOOST_AUTO_TEST_CASE(HammingLossBoundIris)
perceptronIter);
// Define parameters for AdaBoost.
- int iterations = 100;
+ size_t iterations = 100;
double tolerance = 1e-10;
- AdaBoost<> a(inputData, labels.row(0), iterations, tolerance, p);
+ AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance);
int countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
@@ -80,16 +80,16 @@ BOOST_AUTO_TEST_CASE(WeakLearnerErrorIris)
perceptronIter);
p.Classify(inputData, perceptronPrediction);
- int countWeakLearnerError = 0;
+ size_t countWeakLearnerError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != perceptronPrediction(i))
countWeakLearnerError++;
double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols;
// Define parameters for AdaBoost.
- int iterations = 100;
+ size_t iterations = 100;
double tolerance = 1e-10;
- AdaBoost<> a(inputData, labels.row(0), iterations, tolerance, p);
+ AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance);
int countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
@@ -116,16 +116,15 @@ BOOST_AUTO_TEST_CASE(HammingLossBoundVertebralColumn)
// Define your own weak learner, perceptron in this case.
// Run the perceptron for perceptronIter iterations.
- int perceptronIter = 800;
-
+ size_t perceptronIter = 800;
perceptron::Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1,
perceptronIter);
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
- AdaBoost<> a(inputData, labels.row(0), iterations, tolerance, p);
- int countError = 0;
+ AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -152,24 +151,24 @@ BOOST_AUTO_TEST_CASE(WeakLearnerErrorVertebralColumn)
// Define your own weak learner, perceptron in this case.
// Run the perceptron for perceptronIter iterations.
- int perceptronIter = 800;
+ size_t perceptronIter = 800;
arma::Row<size_t> perceptronPrediction(labels.n_cols);
perceptron::Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1,
perceptronIter);
p.Classify(inputData, perceptronPrediction);
- int countWeakLearnerError = 0;
+ size_t countWeakLearnerError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != perceptronPrediction(i))
countWeakLearnerError++;
double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols;
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
- AdaBoost<> a(inputData, labels.row(0), iterations, tolerance, p);
- int countError = 0;
+ AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if(labels(i) != a.finalHypothesis(i))
countError++;
@@ -195,16 +194,15 @@ BOOST_AUTO_TEST_CASE(HammingLossBoundNonLinearSepData)
// Define your own weak learner, perceptron in this case.
// Run the perceptron for perceptronIter iterations.
- int perceptronIter = 800;
-
+ size_t perceptronIter = 800;
perceptron::Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1,
perceptronIter);
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
- AdaBoost<> a(inputData, labels.row(0), iterations, tolerance, p);
- int countError = 0;
+ AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -231,24 +229,24 @@ BOOST_AUTO_TEST_CASE(WeakLearnerErrorNonLinearSepData)
// Define your own weak learner, perceptron in this case.
// Run the perceptron for perceptronIter iterations.
- int perceptronIter = 800;
+ size_t perceptronIter = 800;
arma::Row<size_t> perceptronPrediction(labels.n_cols);
perceptron::Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1,
perceptronIter);
p.Classify(inputData, perceptronPrediction);
- int countWeakLearnerError = 0;
+ size_t countWeakLearnerError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
- if(labels(i) != perceptronPrediction(i))
+ if (labels(i) != perceptronPrediction(i))
countWeakLearnerError++;
double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols;
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
- AdaBoost<> a(inputData, labels.row(0), iterations, tolerance, p);
- int countError = 0;
+ AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -273,19 +271,17 @@ BOOST_AUTO_TEST_CASE(HammingLossIris_DS)
BOOST_FAIL("Cannot load labels for iris_labels.txt");
// Define your own weak learner, decision stumps in this case.
-
- // Define parameters for AdaBoost.
const size_t numClasses = 3;
const size_t inpBucketSize = 6;
-
decision_stump::DecisionStump<> ds(inputData, labels.row(0),
numClasses, inpBucketSize);
- int iterations = 50;
- double tolerance = 1e-10;
+ // Define parameters for AdaBoost.
+ size_t iterations = 50;
+ double tolerance = 1e-10;
AdaBoost<arma::mat, decision_stump::DecisionStump<>> a(inputData,
- labels.row(0), iterations, tolerance, ds);
- int countError = 0;
+ labels.row(0), ds, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -314,29 +310,28 @@ BOOST_AUTO_TEST_CASE(WeakLearnerErrorIris_DS)
// no need to map the labels here
// Define your own weak learner, decision stumps in this case.
-
const size_t numClasses = 3;
const size_t inpBucketSize = 6;
arma::Row<size_t> dsPrediction(labels.n_cols);
- decision_stump::DecisionStump<> ds(inputData, labels.row(0),
- numClasses, inpBucketSize);
+ decision_stump::DecisionStump<> ds(inputData, labels.row(0), numClasses,
+ inpBucketSize);
ds.Classify(inputData, dsPrediction);
- int countWeakLearnerError = 0;
+ size_t countWeakLearnerError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if(labels(i) != dsPrediction(i))
countWeakLearnerError++;
double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols;
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
- AdaBoost<arma::mat, decision_stump::DecisionStump<> > a(inputData,
- labels.row(0), iterations, tolerance, ds);
- int countError = 0;
+ AdaBoost<arma::mat, decision_stump::DecisionStump<>> a(inputData,
+ labels.row(0), ds, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -344,6 +339,7 @@ BOOST_AUTO_TEST_CASE(WeakLearnerErrorIris_DS)
BOOST_REQUIRE(error <= weakLearnerErrorRate);
}
+
/**
* This test case runs the AdaBoost.mh algorithm on the UCI Vertebral Column
* dataset. It checks if the error returned by running a single instance of the
@@ -368,12 +364,12 @@ BOOST_AUTO_TEST_CASE(HammingLossBoundVertebralColumn_DS)
numClasses, inpBucketSize);
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
AdaBoost<arma::mat, decision_stump::DecisionStump<>> a(inputData,
- labels.row(0), iterations, tolerance, ds);
- int countError = 0;
+ labels.row(0), ds, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -407,7 +403,7 @@ BOOST_AUTO_TEST_CASE(WeakLearnerErrorVertebralColumn_DS)
decision_stump::DecisionStump<> ds(inputData, labels.row(0), numClasses,
inpBucketSize);
- int countWeakLearnerError = 0;
+ size_t countWeakLearnerError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != dsPrediction(i))
countWeakLearnerError++;
@@ -415,12 +411,12 @@ BOOST_AUTO_TEST_CASE(WeakLearnerErrorVertebralColumn_DS)
double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols;
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
AdaBoost<arma::mat, decision_stump::DecisionStump<>> a(inputData,
- labels.row(0), iterations, tolerance, ds);
+ labels.row(0), ds, iterations, tolerance);
- int countError = 0;
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -452,12 +448,12 @@ BOOST_AUTO_TEST_CASE(HammingLossBoundNonLinearSepData_DS)
numClasses, inpBucketSize);
// Define parameters for Adaboost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
AdaBoost<arma::mat, mlpack::decision_stump::DecisionStump<> > a(inputData,
- labels.row(0), iterations, tolerance, ds);
- int countError = 0;
+ labels.row(0), ds, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -492,19 +488,19 @@ BOOST_AUTO_TEST_CASE(WeakLearnerErrorNonLinearSepData_DS)
decision_stump::DecisionStump<> ds(inputData, labels.row(0),
numClasses, inpBucketSize);
- int countWeakLearnerError = 0;
+ size_t countWeakLearnerError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
- if(labels(i) != dsPrediction(i))
+ if (labels(i) != dsPrediction(i))
countWeakLearnerError++;
double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols;
// Define parameters for AdaBoost.
- int iterations = 500;
+ size_t iterations = 500;
double tolerance = 1e-23;
AdaBoost<arma::mat, mlpack::decision_stump::DecisionStump<> > a(inputData,
- labels.row(0), iterations, tolerance, ds);
- int countError = 0;
+ labels.row(0), ds, iterations, tolerance);
+ size_t countError = 0;
for (size_t i = 0; i < labels.n_cols; i++)
if (labels(i) != a.finalHypothesis(i))
countError++;
@@ -531,7 +527,7 @@ BOOST_AUTO_TEST_CASE(ClassifyTest_VERTEBRALCOL)
// Define your own weak learner, perceptron in this case.
// Run the perceptron for perceptronIter iterations.
- int perceptronIter = 1000;
+ size_t perceptronIter = 1000;
arma::mat testData;
@@ -549,15 +545,14 @@ BOOST_AUTO_TEST_CASE(ClassifyTest_VERTEBRALCOL)
p.Classify(inputData, perceptronPrediction);
// Define parameters for AdaBoost.
- int iterations = 100;
+ size_t iterations = 100;
double tolerance = 1e-10;
- AdaBoost<> a(inputData, labels.row(0), iterations, tolerance, p);
+ AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance);
arma::Row<size_t> predictedLabels(testData.n_cols);
a.Classify(testData, predictedLabels);
- int localError = 0;
-
+ size_t localError = 0;
for (size_t i = 0; i < trueTestLabels.n_cols; i++)
if (trueTestLabels(i) != predictedLabels(i))
localError++;
@@ -602,15 +597,15 @@ BOOST_AUTO_TEST_CASE(ClassifyTest_NONLINSEP)
numClasses, inpBucketSize);
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
AdaBoost<arma::mat, mlpack::decision_stump::DecisionStump<> > a(
- inputData, labels.row(0), iterations, tolerance, ds);
+ inputData, labels.row(0), ds, iterations, tolerance);
arma::Row<size_t> predictedLabels(testData.n_cols);
a.Classify(testData, predictedLabels);
- int localError = 0;
+ size_t localError = 0;
for (size_t i = 0; i < trueTestLabels.n_cols; i++)
if (trueTestLabels(i) != predictedLabels(i))
localError++;
@@ -638,15 +633,15 @@ BOOST_AUTO_TEST_CASE(ClassifyTest_IRIS)
// Define your own weak learner, perceptron in this case.
// Run the perceptron for perceptronIter iterations.
- int perceptronIter = 800;
+ size_t perceptronIter = 800;
perceptron::Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1,
perceptronIter);
// Define parameters for AdaBoost.
- int iterations = 50;
+ size_t iterations = 50;
double tolerance = 1e-10;
- AdaBoost<> a(inputData, labels.row(0), iterations, tolerance, p);
+ AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance);
arma::mat testData;
if (!data::Load("iris_test.csv", inputData))
@@ -660,7 +655,7 @@ BOOST_AUTO_TEST_CASE(ClassifyTest_IRIS)
if (!data::Load("iris_test_labels.csv", inputData))
BOOST_FAIL("Cannot load test dataset iris_test_labels.csv!");
- int localError = 0;
+ size_t localError = 0;
for (size_t i = 0; i < trueTestLabels.n_cols; i++)
if (trueTestLabels(i) != predictedLabels(i))
localError++;
More information about the mlpack-git mailing list