[mlpack-git] master: Split biases into separate vector (speedup). (a961350)
gitdub at big.cc.gt.atl.ga.us
Fri Sep 4 13:33:02 EDT 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/424383cb02dcf2d73728e1c3c4b582bdb7cba627...f3bd5e8853a795f4ff41849dd8ef844d53199412
>---------------------------------------------------------------
commit a96135093f1b526af16a6a583967e0b75e3d9f55
Author: Ryan Curtin <ryan at ratml.org>
Date: Fri Sep 4 16:35:33 2015 +0000
Split biases into separate vector (speedup).
>---------------------------------------------------------------
a96135093f1b526af16a6a583967e0b75e3d9f55
.../perceptron/initialization_methods/random_init.hpp | 6 ++++--
.../perceptron/initialization_methods/zero_init.hpp | 6 ++++--
.../perceptron/learning_policies/simple_weight_update.hpp | 11 +++++------
src/mlpack/methods/perceptron/perceptron.hpp | 6 +++++-
src/mlpack/methods/perceptron/perceptron_impl.hpp | 14 ++++++--------
5 files changed, 24 insertions(+), 19 deletions(-)
diff --git a/src/mlpack/methods/perceptron/initialization_methods/random_init.hpp b/src/mlpack/methods/perceptron/initialization_methods/random_init.hpp
index 7ec04df..ee94ca7 100644
--- a/src/mlpack/methods/perceptron/initialization_methods/random_init.hpp
+++ b/src/mlpack/methods/perceptron/initialization_methods/random_init.hpp
@@ -21,11 +21,13 @@ class RandomInitialization
public:
RandomInitialization() { }
- inline static void Initialize(arma::mat& W,
+ inline static void Initialize(arma::mat& weights,
+ arma::vec& biases,
const size_t numFeatures,
const size_t numClasses)
{
- W = arma::randu<arma::mat>(numFeatures, numClasses);
+ weights.randu(numFeatures, numClasses);
+ biases.randu(numClasses);
}
}; // class RandomInitialization
diff --git a/src/mlpack/methods/perceptron/initialization_methods/zero_init.hpp b/src/mlpack/methods/perceptron/initialization_methods/zero_init.hpp
index acea4a3..450f27d 100644
--- a/src/mlpack/methods/perceptron/initialization_methods/zero_init.hpp
+++ b/src/mlpack/methods/perceptron/initialization_methods/zero_init.hpp
@@ -20,11 +20,13 @@ class ZeroInitialization
public:
ZeroInitialization() { }
- inline static void Initialize(arma::mat& W,
+ inline static void Initialize(arma::mat& weights,
+ arma::vec& biases,
const size_t numFeatures,
const size_t numClasses)
{
- W = arma::zeros<arma::mat>(numFeatures, numClasses);
+ weights.zeros(numFeatures, numClasses);
+ biases.zeros(numClasses);
}
}; // class ZeroInitialization
diff --git a/src/mlpack/methods/perceptron/learning_policies/simple_weight_update.hpp b/src/mlpack/methods/perceptron/learning_policies/simple_weight_update.hpp
index 262c2cd..603cd11 100644
--- a/src/mlpack/methods/perceptron/learning_policies/simple_weight_update.hpp
+++ b/src/mlpack/methods/perceptron/learning_policies/simple_weight_update.hpp
@@ -39,18 +39,17 @@ class SimpleWeightUpdate
*/
void UpdateWeights(const arma::mat& trainData,
arma::mat& weightVectors,
+ arma::vec& biases,
const size_t labelIndex,
const size_t vectorIndex,
const size_t colIndex,
const arma::rowvec& D)
{
- weightVectors.col(colIndex).subvec(1, weightVectors.n_rows - 1) -=
- D(labelIndex) * trainData.col(labelIndex);
- weightVectors(colIndex, 0) -= D(labelIndex);
+ weightVectors.col(colIndex) -= D(labelIndex) * trainData.col(labelIndex);
+ biases(colIndex) -= D(labelIndex);
- weightVectors.col(vectorIndex).subvec(1, weightVectors.n_rows - 1) +=
- D(labelIndex) * trainData.col(labelIndex);
- weightVectors(vectorIndex, 0) += D(labelIndex);
+ weightVectors.col(vectorIndex) += D(labelIndex) * trainData.col(labelIndex);
+ biases(vectorIndex) += D(labelIndex);
}
};
diff --git a/src/mlpack/methods/perceptron/perceptron.hpp b/src/mlpack/methods/perceptron/perceptron.hpp
index b556684..d81d936 100644
--- a/src/mlpack/methods/perceptron/perceptron.hpp
+++ b/src/mlpack/methods/perceptron/perceptron.hpp
@@ -83,10 +83,14 @@ private:
/**
* Stores the weight vectors for each of the input class labels. Each column
* corresponds to the weights for one class label, and each row corresponds to
- * the weights for one dimension of the input data.
+ * the weights for one dimension of the input data. The biases are held in a
+ * separate vector.
*/
arma::mat weightVectors;
+ //! The biases for each class.
+ arma::vec biases;
+
/**
* Training Function. It trains on trainData using the cost matrix D
*
diff --git a/src/mlpack/methods/perceptron/perceptron_impl.hpp b/src/mlpack/methods/perceptron/perceptron_impl.hpp
index 1f4bf6c..ad34b83 100644
--- a/src/mlpack/methods/perceptron/perceptron_impl.hpp
+++ b/src/mlpack/methods/perceptron/perceptron_impl.hpp
@@ -34,7 +34,7 @@ Perceptron<LearnPolicy, WeightInitializationPolicy, MatType>::Perceptron(
const int iterations)
{
WeightInitializationPolicy WIP;
- WIP.Initialize(weightVectors, data.n_rows + 1, arma::max(labels) + 1);
+ WIP.Initialize(weightVectors, biases, data.n_rows, arma::max(labels) + 1);
// Start training.
iter = iterations;
@@ -68,9 +68,7 @@ void Perceptron<LearnPolicy, WeightInitializationPolicy, MatType>::Classify(
// Could probably be faster if done in batch.
for (size_t i = 0; i < test.n_cols; i++)
{
- tempLabelMat = weightVectors.submat(1, 0, weightVectors.n_rows - 1,
- weightVectors.n_cols - 1).t() *
- test.col(i) + weightVectors.row(0).t();
+ tempLabelMat = weightVectors.t() * test.col(i) + biases;
tempLabelMat.max(maxIndex);
predictedLabels(0, i) = maxIndex;
}
@@ -101,7 +99,7 @@ Perceptron<LearnPolicy, WeightInitializationPolicy, MatType>::Perceptron(
// Insert a row of ones at the top of the training data set.
WeightInitializationPolicy WIP;
- WIP.Initialize(weightVectors, data.n_rows + 1, arma::max(labels) + 1);
+ WIP.Initialize(weightVectors, biases, data.n_rows, arma::max(labels) + 1);
Train(data, labels, D);
}
@@ -153,8 +151,7 @@ void Perceptron<LearnPolicy, WeightInitializationPolicy, MatType>::Train(
{
// Multiply for each variable and check whether the current weight vector
// correctly classifies this.
- tempLabelMat = weightVectors.rows(1, weightVectors.n_rows - 1).t() *
- data.col(j) + weightVectors.row(0).t();
+ tempLabelMat = weightVectors.t() * data.col(j) + biases;
tempLabelMat.max(maxIndexRow, maxIndexCol);
@@ -167,7 +164,8 @@ void Perceptron<LearnPolicy, WeightInitializationPolicy, MatType>::Train(
// Send maxIndexRow for knowing which weight to update, send j to know
// the value of the vector to update it with. Send tempLabel to know
// the correct class.
- LP.UpdateWeights(data, weightVectors, j, tempLabel, maxIndexCol, D);
+ LP.UpdateWeights(data, weightVectors, biases, j, tempLabel, maxIndexCol,
+ D);
}
}
}
More information about the mlpack-git mailing list