[mlpack-svn] r16832 - in mlpack/trunk/src/mlpack/methods: . adaboost perceptron
fastlab-svn at coffeetalk-1.cc.gatech.edu
Wed Jul 16 14:26:18 EDT 2014
Author: saxena.udit
Date: Wed Jul 16 14:26:17 2014
New Revision: 16832
Log:
Adaboost and Perceptron modified (improved constructor), going for tests on one Weak Learner.
Modified:
mlpack/trunk/src/mlpack/methods/CMakeLists.txt
mlpack/trunk/src/mlpack/methods/adaboost/adaboost.hpp
mlpack/trunk/src/mlpack/methods/adaboost/adaboost_impl.hpp
mlpack/trunk/src/mlpack/methods/adaboost/adaboost_main.cpp
mlpack/trunk/src/mlpack/methods/perceptron/perceptron.hpp
mlpack/trunk/src/mlpack/methods/perceptron/perceptron_impl.hpp
mlpack/trunk/src/mlpack/methods/perceptron/perceptron_main.cpp
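
For readers skimming the diffs, here is a rough sketch of how the revised pieces are meant to fit together, based only on the constructor calls visible in adaboost_main.cpp below. Header paths, the input filename, and the 'classes' value are assumptions, and this mirrors the in-flux main file rather than being a compile-tested example:

#include <mlpack/core.hpp>
#include <mlpack/methods/adaboost/adaboost.hpp>
#include <mlpack/methods/perceptron/perceptron.hpp>

using namespace mlpack;

int main()
{
  // Load the training set; mlpack stores one point per column.
  arma::mat trainingData;
  data::Load("train.csv", trainingData, true);

  // Normalized class labels, one per column (Col<size_t>, as in adaboost_main.cpp).
  arma::Col<size_t> labels;
  // ... load and normalize the labels here ...

  // Train one weak learner up front; AdaBoost copies and re-trains it each round.
  perceptron::Perceptron<> p(trainingData, labels, 1000);

  const int iterations = 1000;
  const int classes = 2;  // number of distinct labels in the set
  adaboost::Adaboost<> a(trainingData, labels, iterations, classes, p);

  return 0;
}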
Modified: mlpack/trunk/src/mlpack/methods/CMakeLists.txt
==============================================================================
--- mlpack/trunk/src/mlpack/methods/CMakeLists.txt (original)
+++ mlpack/trunk/src/mlpack/methods/CMakeLists.txt Wed Jul 16 14:26:17 2014
@@ -1,6 +1,6 @@
# Recurse into each method mlpack provides.
set(DIRS
-# adaboost
+# adaboost
amf
cf
decision_stump
Modified: mlpack/trunk/src/mlpack/methods/adaboost/adaboost.hpp
==============================================================================
--- mlpack/trunk/src/mlpack/methods/adaboost/adaboost.hpp (original)
+++ mlpack/trunk/src/mlpack/methods/adaboost/adaboost.hpp Wed Jul 16 14:26:17 2014
@@ -9,7 +9,7 @@
#define _MLPACK_METHODS_ADABOOST_ADABOOST_HPP
#include <mlpack/core.hpp>
-#include "../perceptron/main/perceptron.hpp"
+#include <mlpack/methods/perceptron/perceptron.hpp>
namespace mlpack {
namespace adaboost {
Modified: mlpack/trunk/src/mlpack/methods/adaboost/adaboost_impl.hpp
==============================================================================
--- mlpack/trunk/src/mlpack/methods/adaboost/adaboost_impl.hpp (original)
+++ mlpack/trunk/src/mlpack/methods/adaboost/adaboost_impl.hpp Wed Jul 16 14:26:17 2014
@@ -20,6 +20,9 @@
{
int j, i;
+ // note: put a fail safe for classes or remove it entirely
+ // by using unique function.
+
// load the initial weights
const double initWeight = 1 / (data.n_cols * classes);
@@ -34,14 +37,9 @@
{
rt = 0.0;
zt = 0.0;
-
- //transform data, as per rules for perceptron
- for (j = 0;j < tempData.n_cols;j++)
- tempData.col(i) = D(i) * tempData.col(i);
-
- // for now, perceptron initialized with default parameters
- //mlpack::perceptron::Perceptron<> p(tempData, labels, 1000);
- WeakLearner w(other);
+
+ // call the other weak learner and train the labels.
+ WeakLearner w(other, tempData, D, labels);
w.Classify(tempData, predictedLabels);
// Now, start calculation of alpha(t)
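
In context, the hunk above changes the boosting loop so that each round copy-constructs a fresh weak learner on the distribution-weighted data instead of transforming the data in place. A rough sketch of the resulting loop shape (rt, zt, D, tempData, labels and predictedLabels are defined in surrounding code the hunk does not show):

for (int t = 0; t < iterations; t++)
{
  rt = 0.0;
  zt = 0.0;

  // Re-train a copy of 'other' on the data weighted by the current
  // distribution D, then classify the training set with it.
  WeakLearner w(other, tempData, D, labels);
  w.Classify(tempData, predictedLabels);

  // ... compute rt and alpha(t) from predictedLabels, update D and zt ...
}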
Modified: mlpack/trunk/src/mlpack/methods/adaboost/adaboost_main.cpp
==============================================================================
--- mlpack/trunk/src/mlpack/methods/adaboost/adaboost_main.cpp (original)
+++ mlpack/trunk/src/mlpack/methods/adaboost/adaboost_main.cpp Wed Jul 16 14:26:17 2014
@@ -11,6 +11,7 @@
using namespace mlpack;
using namespace std;
using namespace arma;
+using namespace mlpack::adaboost;
PROGRAM_INFO("","");
@@ -25,7 +26,7 @@
" will be written.", "o", "output.csv");
PARAM_INT("iterations","The maximum number of boosting iterations "
"to be run", "i", 1000);
-PARAM_INT("classes","The number of classes in the input label set.","c");
+PARAM_INT_REQ("classes","The number of classes in the input label set.","c");
int main(int argc, char *argv[])
{
@@ -38,7 +39,26 @@
const string labelsFilename = CLI::GetParam<string>("labels_file");
// Load labels.
mat labelsIn;
- data::Load(labelsFilename, labelsIn, true);
+ // data::Load(labelsFilename, labelsIn, true);
+
+ if (CLI::HasParam("labels_file"))
+ {
+ const string labelsFilename = CLI::GetParam<string>("labels_file");
+ // Load labels.
+ data::Load(labelsFilename, labelsIn, true);
+
+ // Do the labels need to be transposed?
+ if (labelsIn.n_rows == 1)
+ labelsIn = labelsIn.t();
+ }
+ else
+ {
+ // Extract the labels as the last dimension of the training data.
+ Log::Info << "Using the last dimension of training set as labels." << endl;
+
+ labelsIn = trainingData.row(trainingData.n_rows - 1).t();
+ trainingData.shed_row(trainingData.n_rows - 1);
+ }
// helpers for normalizing the labels
Col<size_t> labels;
@@ -61,15 +81,21 @@
<< ")!" << std::endl;
int iterations = CLI::GetParam<int>("iterations");
+ int classes = 6;
+
+ // define your own weak learner, perceptron in this case.
+ int iter = 1000;
+ perceptron::Perceptron<> p(trainingData, labels, iter);
+ //
Timer::Start("Training");
- Adaboost<> a(trainingData, labels, iterations, classes);
+ Adaboost<> a(trainingData, labels, iterations, classes, p);
Timer::Stop("Training");
- vec results;
- data::RevertLabels(predictedLabels, mappings, results);
+ // vec results;
+ // data::RevertLabels(predictedLabels, mappings, results);
- const string outputFilename = CLI::GetParam<string>("output");
- data::Save(outputFilename, results, true, true);
+ // const string outputFilename = CLI::GetParam<string>("output");
+ // data::Save(outputFilename, results, true, true);
return 0;
}
\ No newline at end of file
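
The fallback added above follows the pattern used by other mlpack executables: when --labels_file is not given, the last row of the training matrix is peeled off and used as the labels. Stripped of the CLI plumbing, the Armadillo part is just:

arma::mat trainingData;  // loaded earlier from --train_file

// Treat the last dimension of the training set as the labels,
// then remove it from the data matrix.
arma::mat labelsIn = trainingData.row(trainingData.n_rows - 1).t();
trainingData.shed_row(trainingData.n_rows - 1);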
Modified: mlpack/trunk/src/mlpack/methods/perceptron/perceptron.hpp
==============================================================================
--- mlpack/trunk/src/mlpack/methods/perceptron/perceptron.hpp (original)
+++ mlpack/trunk/src/mlpack/methods/perceptron/perceptron.hpp Wed Jul 16 14:26:17 2014
@@ -58,16 +58,12 @@
*
*
*/
- Perceptron(const Perceptron<>& p);
+ Perceptron(const Perceptron<>& other, MatType& data, const arma::Row<double>& D, const arma::Row<size_t>& labels);
- /**
- *
- *
- *
- *
- ModifyData(MatType& data, const arma::Row<double>& D);
- */
private:
+ //! To store the number of iterations
+ size_t iter;
+
//! Stores the class labels for the input data.
arma::Row<size_t> classLabels;
@@ -76,6 +72,12 @@
//! Stores the training data to be used later on in UpdateWeights.
arma::mat trainData;
+
+ /**
+ * Train function.
+ *
+ */
+ void Train();
};
} // namespace perceptron
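
The new four-argument constructor declared above is the hook AdaBoost uses: it scales each column of the data by the corresponding weight in D, copies the iteration count from the existing perceptron, and calls Train() again. A hedged usage sketch, with types taken from the declaration and not compile-tested against this revision:

arma::mat trainingData;    // one point per column
arma::Row<size_t> labels;  // one label per column
// ... fill trainingData and labels ...

// A perceptron trained the usual way.
perceptron::Perceptron<> base(trainingData, labels, 1000);

// One boosting weight per training point, e.g. a uniform distribution.
arma::Row<double> D(trainingData.n_cols);
D.fill(1.0 / trainingData.n_cols);

// Re-train a copy on the weighted data, as AdaBoost does each round.
perceptron::Perceptron<> weighted(base, trainingData, D, labels);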
Modified: mlpack/trunk/src/mlpack/methods/perceptron/perceptron_impl.hpp
==============================================================================
--- mlpack/trunk/src/mlpack/methods/perceptron/perceptron_impl.hpp (original)
+++ mlpack/trunk/src/mlpack/methods/perceptron/perceptron_impl.hpp Wed Jul 16 14:26:17 2014
@@ -45,45 +45,11 @@
zOnes.fill(1);
trainData.insert_rows(0, zOnes);
- int j, i = 0;
- bool converged = false;
- size_t tempLabel;
- arma::uword maxIndexRow, maxIndexCol;
- arma::mat tempLabelMat;
-
- LearnPolicy LP;
-
- while ((i < iterations) && (!converged))
- {
- // This outer loop is for each iteration, and we use the 'converged'
- // variable for noting whether or not convergence has been reached.
- i++;
- converged = true;
-
- // Now this inner loop is for going through the dataset in each iteration.
- for (j = 0; j < data.n_cols; j++)
- {
- // Multiply for each variable and check whether the current weight vector
- // correctly classifies this.
- tempLabelMat = weightVectors * trainData.col(j);
-
- tempLabelMat.max(maxIndexRow, maxIndexCol);
-
- // Check whether prediction is correct.
- if (maxIndexRow != classLabels(0, j))
- {
- // Due to incorrect prediction, convergence set to false.
- converged = false;
- tempLabel = labels(0, j);
- // Send maxIndexRow for knowing which weight to update, send j to know
- // the value of the vector to update it with. Send tempLabel to know
- // the correct class.
- LP.UpdateWeights(trainData, weightVectors, j, tempLabel, maxIndexRow);
- }
- }
- }
+ iter = iterations;
+ Train();
}
+
/**
* Classification function. After training, use the weightVectors matrix to
* classify test, and put the predicted classes in predictedLabels.
@@ -112,24 +78,70 @@
template <typename LearnPolicy, typename WeightInitializationPolicy, typename MatType>
Perceptron<LearnPolicy, WeightInitializationPolicy, MatType>::Perceptron(
- const Perceptron<>& p)
+ const Perceptron<>& other, MatType& data, const arma::Row<double>& D, const arma::Row<size_t>& labels)
{
- classLabels = p.classLabels;
-
- weightVectors = p.weightVectors;
+ int i;
+ //transform data, as per rules for perceptron
+ for (i = 0;i < data.n_cols; i++)
+ data.col(i) = D(i) * data.col(i);
+
+ classLabels = labels;
+ trainData = data;
+ iter = other.iter;
- trainData = p.trainData;
+ Train();
}
-/*
-template <typename LearnPolicy, typename WeightInitializationPolicy, typename MatType>
-Perceptron<LearnPolicy, WeightInitializationPolicy, MatType>::ModifyData(
- MatType& data, const arma::Row<double>& D)
+/**
+ * Training Function.
+ *
+ */
+template<
+ typename LearnPolicy,
+ typename WeightInitializationPolicy,
+ typename MatType
+>
+void Perceptron<LearnPolicy, WeightInitializationPolicy, MatType>::Train()
{
- for (int j = 0;j < data.n_cols;j++)
- data.col(i) = D(i) * data.col(i);
+ int j, i = 0;
+ bool converged = false;
+ size_t tempLabel;
+ arma::uword maxIndexRow, maxIndexCol;
+ arma::mat tempLabelMat;
+
+ LearnPolicy LP;
+
+ while ((i < iter) && (!converged))
+ {
+ // This outer loop is for each iteration, and we use the 'converged'
+ // variable for noting whether or not convergence has been reached.
+ i++;
+ converged = true;
+
+ // Now this inner loop is for going through the dataset in each iteration.
+ for (j = 0; j < trainData.n_cols; j++)
+ {
+ // Multiply for each variable and check whether the current weight vector
+ // correctly classifies this.
+ tempLabelMat = weightVectors * trainData.col(j);
+
+ tempLabelMat.max(maxIndexRow, maxIndexCol);
+
+ // Check whether prediction is correct.
+ if (maxIndexRow != classLabels(0, j))
+ {
+ // Due to incorrect prediction, convergence set to false.
+ converged = false;
+ tempLabel = classLabels(0, j);
+ // Send maxIndexRow for knowing which weight to update, send j to know
+ // the value of the vector to update it with. Send tempLabel to know
+ // the correct class.
+ LP.UpdateWeights(trainData, weightVectors, j, tempLabel, maxIndexRow);
+ }
+ }
+ }
}
-*/
+
}; // namespace perceptron
}; // namespace mlpack
Modified: mlpack/trunk/src/mlpack/methods/perceptron/perceptron_main.cpp
==============================================================================
--- mlpack/trunk/src/mlpack/methods/perceptron/perceptron_main.cpp (original)
+++ mlpack/trunk/src/mlpack/methods/perceptron/perceptron_main.cpp Wed Jul 16 14:26:17 2014
@@ -38,8 +38,8 @@
// Necessary parameters
PARAM_STRING_REQ("train_file", "A file containing the training set.", "t");
-PARAM_STRING_REQ("labels_file", "A file containing labels for the training set.",
- "l");
+PARAM_STRING("labels_file", "A file containing labels for the training set.",
+ "l","");
PARAM_STRING_REQ("test_file", "A file containing the test set.", "T");
// Optional parameters.