[mlpack-git] master: Adjust weight init methods. (2eb28bc)

gitdub at big.cc.gt.atl.ga.us
Sun Apr 26 06:55:59 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/fbd6b1f878ec3b2fa365254a22daf3add743ee51...2eb28bcc2ada2fe09a7ad7073c0bbcbb96aac0c5

>---------------------------------------------------------------

commit 2eb28bcc2ada2fe09a7ad7073c0bbcbb96aac0c5
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Sat Apr 25 19:55:23 2015 +0200

    Adjust weight init methods.

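    Editor's note: this commit drops the empty template argument lists from
    the weight initialization rules. NguyenWidrowInitialization,
    RandomInitialization, and OrthogonalInitialization are now used as plain
    classes rather than class templates, both as the WeightInitRule default
    of the connection and layer templates and at their call sites in the
    tests. A minimal sketch of the adjusted usage follows; the header paths
    are assumed from the repository layout, while the constructor and
    Initialize() signatures are taken from the test code in the diff below.

        // Sketch only, not part of the commit. Header paths are assumed.
        #include <mlpack/core.hpp>
        #include <mlpack/methods/ann/init_rules/random_init.hpp>
        #include <mlpack/methods/ann/init_rules/nguyen_widrow_init.hpp>

        using namespace mlpack::ann;

        int main()
        {
          arma::mat weights;

          // Before this commit: RandomInitialization<> randInit(-0.5, 0.5);
          // After: the rule is a plain class, no template argument list.
          RandomInitialization randInit(-0.5, 0.5);
          randInit.Initialize(weights, 100, 100);

          // The same applies to NguyenWidrowInitialization, the default
          // WeightInitRule of the connection classes touched here.
          NguyenWidrowInitialization nguyenWidrow;
          nguyenWidrow.Initialize(weights, 100, 100);

          return 0;
        }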

>---------------------------------------------------------------

2eb28bcc2ada2fe09a7ad7073c0bbcbb96aac0c5
 .../methods/ann/connections/full_connection.hpp    |  2 +-
 .../ann/connections/fullself_connection.hpp        |  2 +-
 .../methods/ann/connections/self_connection.hpp    |  2 +-
 src/mlpack/methods/ann/layer/lstm_layer.hpp        |  2 +-
 src/mlpack/tests/feedforward_network_test.cpp      | 24 +++++++++++-----------
 src/mlpack/tests/init_rules_test.cpp               |  6 +++---
 src/mlpack/tests/recurrent_network_test.cpp        | 18 ++++++++--------
 7 files changed, 28 insertions(+), 28 deletions(-)

diff --git a/src/mlpack/methods/ann/connections/full_connection.hpp b/src/mlpack/methods/ann/connections/full_connection.hpp
index dcccb9b..953665b 100644
--- a/src/mlpack/methods/ann/connections/full_connection.hpp
+++ b/src/mlpack/methods/ann/connections/full_connection.hpp
@@ -29,7 +29,7 @@ template<
     typename InputLayerType,
     typename OutputLayerType,
     typename OptimizerType,
-    class WeightInitRule = NguyenWidrowInitialization<>,
+    class WeightInitRule = NguyenWidrowInitialization,
     typename MatType = arma::mat,
     typename VecType = arma::colvec
 >
diff --git a/src/mlpack/methods/ann/connections/fullself_connection.hpp b/src/mlpack/methods/ann/connections/fullself_connection.hpp
index 470be4e..1ad50b9 100644
--- a/src/mlpack/methods/ann/connections/fullself_connection.hpp
+++ b/src/mlpack/methods/ann/connections/fullself_connection.hpp
@@ -31,7 +31,7 @@ template<
     typename InputLayerType,
     typename OutputLayerType,
     typename OptimizerType,
-    class WeightInitRule = NguyenWidrowInitialization<>,
+    class WeightInitRule = NguyenWidrowInitialization,
     typename MatType = arma::mat,
     typename VecType = arma::colvec
 >
diff --git a/src/mlpack/methods/ann/connections/self_connection.hpp b/src/mlpack/methods/ann/connections/self_connection.hpp
index 3b67a8a..8dbe5e0 100644
--- a/src/mlpack/methods/ann/connections/self_connection.hpp
+++ b/src/mlpack/methods/ann/connections/self_connection.hpp
@@ -31,7 +31,7 @@ template<
     typename InputLayerType,
     typename OutputLayerType,
     typename OptimizerType,
-    class WeightInitRule = NguyenWidrowInitialization<>,
+    class WeightInitRule = NguyenWidrowInitialization,
     typename MatType = arma::mat,
     typename VecType = arma::colvec
 >
diff --git a/src/mlpack/methods/ann/layer/lstm_layer.hpp b/src/mlpack/methods/ann/layer/lstm_layer.hpp
index 921657c..7ec9300 100644
--- a/src/mlpack/methods/ann/layer/lstm_layer.hpp
+++ b/src/mlpack/methods/ann/layer/lstm_layer.hpp
@@ -36,7 +36,7 @@ template <
     class GateActivationFunction = LogisticFunction,
     class StateActivationFunction = TanhFunction,
     class OutputActivationFunction = TanhFunction,
-    class WeightInitRule = NguyenWidrowInitialization<>,
+    class WeightInitRule = NguyenWidrowInitialization,
     typename OptimizerType = SteepestDescent<>,
     typename MatType = arma::mat,
     typename VecType = arma::colvec
diff --git a/src/mlpack/tests/feedforward_network_test.cpp b/src/mlpack/tests/feedforward_network_test.cpp
index 5f67fbc..a4ce5e2 100644
--- a/src/mlpack/tests/feedforward_network_test.cpp
+++ b/src/mlpack/tests/feedforward_network_test.cpp
@@ -159,12 +159,12 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkTest)
   arma::mat testLabels = dataset.submat(dataset.n_rows - 3, 0,
       dataset.n_rows - 1, dataset.n_cols - 1);
 
-  RandomInitialization<> randInitA(1, 2);
+  RandomInitialization randInitA(1, 2);
 
   // Vanilla neural net with logistic activation function.
   // Because 92 percent of the patients are not hyperthyroid, the neural
   // network must be significantly better than 92%.
-  BuildVanillaNetwork<RandomInitialization<>,
+  BuildVanillaNetwork<RandomInitialization,
                       LogisticFunction,
                       SteepestDescent<>,
                       BinaryClassificationLayer<>,
@@ -174,7 +174,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkTest)
 
   dataset.load("mnist_first250_training_4s_and_9s.arm");
 
-  RandomInitialization<> randInitB(-0.5, 0.5);
+  RandomInitialization randInitB(-0.5, 0.5);
 
   // Normalize each point since these are images.
   for (size_t i = 0; i < dataset.n_cols; ++i)
@@ -184,7 +184,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkTest)
   labels.submat(0, labels.n_cols / 2, 0, labels.n_cols - 1).fill(1);
 
   // Vanilla neural net with logistic activation function.
-  BuildVanillaNetwork<RandomInitialization<>,
+  BuildVanillaNetwork<RandomInitialization,
                       LogisticFunction,
                       SteepestDescent<>,
                       BinaryClassificationLayer<>,
@@ -192,7 +192,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkTest)
       (dataset, labels, dataset, labels, 100, 100, 0.6, 10, randInitB);
 
   // Vanilla neural net with tanh activation function.
-  BuildVanillaNetwork<RandomInitialization<>,
+  BuildVanillaNetwork<RandomInitialization,
                     TanhFunction,
                     SteepestDescent<>,
                     BinaryClassificationLayer<>,
@@ -208,7 +208,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
   arma::mat input;
   arma::mat labels;
 
-  RandomInitialization<> randInit(0.5, 1);
+  RandomInitialization randInit(0.5, 1);
 
   // Test on a non-linearly separable dataset (XOR).
   input << 0 << 1 << 1 << 0 << arma::endr
@@ -216,7 +216,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
   labels << 0 << 0 << 1 << 1;
 
   // Vanilla neural net with logistic activation function.
-  BuildVanillaNetwork<RandomInitialization<>,
+  BuildVanillaNetwork<RandomInitialization,
                       LogisticFunction,
                       SteepestDescent<>,
                       BinaryClassificationLayer<>,
@@ -224,7 +224,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
       (input, labels, input, labels, 4, 0, 0, 0.01, randInit);
 
   // Vanilla neural net with tanh activation function.
-  BuildVanillaNetwork<RandomInitialization<>,
+  BuildVanillaNetwork<RandomInitialization,
                       TanhFunction,
                       SteepestDescent<>,
                       BinaryClassificationLayer<>,
@@ -237,7 +237,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
   labels << 0 << 0 << 1 << 0;
 
   // Vanilla neural net with logistic activation function.
-  BuildVanillaNetwork<RandomInitialization<>,
+  BuildVanillaNetwork<RandomInitialization,
                     LogisticFunction,
                     SteepestDescent<>,
                     BinaryClassificationLayer<>,
@@ -245,7 +245,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
     (input, labels, input, labels, 4, 0, 0, 0.01, randInit);
 
   // Vanilla neural net with tanh activation function.
-  BuildVanillaNetwork<RandomInitialization<>,
+  BuildVanillaNetwork<RandomInitialization,
                       TanhFunction,
                       SteepestDescent<>,
                       BinaryClassificationLayer<>,
@@ -358,7 +358,7 @@ BOOST_AUTO_TEST_CASE(NetworkDecreasingErrorTest)
   arma::mat dataset;
   dataset.load("mnist_first250_training_4s_and_9s.arm");
 
-  RandomInitialization<> randInitB(-0.5, 0.5);
+  RandomInitialization randInitB(-0.5, 0.5);
 
   // Normalize each point since these are images.
   for (size_t i = 0; i < dataset.n_cols; ++i)
@@ -368,7 +368,7 @@ BOOST_AUTO_TEST_CASE(NetworkDecreasingErrorTest)
   labels.submat(0, labels.n_cols / 2, 0, labels.n_cols - 1) += 1;
 
   // Vanilla neural net with logistic activation function.
-  BuildNetworkOptimzer<RandomInitialization<>,
+  BuildNetworkOptimzer<RandomInitialization,
                        LogisticFunction,
                        SteepestDescent<>,
                        BinaryClassificationLayer<>,
diff --git a/src/mlpack/tests/init_rules_test.cpp b/src/mlpack/tests/init_rules_test.cpp
index b986452..8c89d7b 100644
--- a/src/mlpack/tests/init_rules_test.cpp
+++ b/src/mlpack/tests/init_rules_test.cpp
@@ -25,7 +25,7 @@ BOOST_AUTO_TEST_SUITE(InitRulesTest);
 BOOST_AUTO_TEST_CASE(ConstantInitTest)
 {
   arma::mat weights;
-  RandomInitialization<> constantInit(1, 1);
+  RandomInitialization constantInit(1, 1);
   constantInit.Initialize(weights, 100, 100);
 
   bool b = arma::all(arma::vectorise(weights) == 1);
@@ -36,7 +36,7 @@ BOOST_AUTO_TEST_CASE(ConstantInitTest)
 BOOST_AUTO_TEST_CASE(OrthogonalInitTest)
 {
   arma::mat weights;
-  OrthogonalInitialization<> orthogonalInit;
+  OrthogonalInitialization orthogonalInit;
   orthogonalInit.Initialize(weights, 100, 200);
 
   arma::mat orthogonalWeights = arma::eye<arma::mat>(100, 100);
@@ -60,7 +60,7 @@ BOOST_AUTO_TEST_CASE(OrthogonalInitGainTest)
   arma::mat weights;
 
   const double gain = 2;
-  OrthogonalInitialization<> orthogonalInit(gain);
+  OrthogonalInitialization orthogonalInit(gain);
   orthogonalInit.Initialize(weights, 100, 200);
 
   arma::mat orthogonalWeights = arma::eye<arma::mat>(100, 100);
diff --git a/src/mlpack/tests/recurrent_network_test.cpp b/src/mlpack/tests/recurrent_network_test.cpp
index dcfa338..e29face 100644
--- a/src/mlpack/tests/recurrent_network_test.cpp
+++ b/src/mlpack/tests/recurrent_network_test.cpp
@@ -119,7 +119,7 @@ BOOST_AUTO_TEST_CASE(SequenceClassificationTest)
   SteepestDescent< > conOptimizer3(hiddenLayer0.InputSize(),
       hiddenLayer1.OutputSize(), 1, 0);
 
-  NguyenWidrowInitialization<> randInit;
+  NguyenWidrowInitialization randInit;
 
   FullConnection<
       decltype(inputLayer),
@@ -316,7 +316,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
   arma::mat input;
   arma::mat labels;
 
-  RandomInitialization<> randInit(1, 1);
+  RandomInitialization randInit(1, 1);
 
   // Test on a non-linearly separable dataset (XOR).
   input << 0 << 1 << 1 << 0 << arma::endr
@@ -324,7 +324,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
   labels << 0 << 0 << 1 << 1;
 
   // Vanilla neural net with logistic activation function.
-  CompareVanillaNetworks<RandomInitialization<>,
+  CompareVanillaNetworks<RandomInitialization,
                       LogisticFunction,
                       SteepestDescent<>,
                       BinaryClassificationLayer<>,
@@ -332,7 +332,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
       (input, labels, input, labels, 10, 10, randInit);
 
   // Vanilla neural net with identity activation function.
-  CompareVanillaNetworks<RandomInitialization<>,
+  CompareVanillaNetworks<RandomInitialization,
                       IdentityFunction,
                       SteepestDescent<>,
                       BinaryClassificationLayer<>,
@@ -340,7 +340,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
       (input, labels, input, labels, 1, 1, randInit);
 
   // Vanilla neural net with rectifier activation function.
-  CompareVanillaNetworks<RandomInitialization<>,
+  CompareVanillaNetworks<RandomInitialization,
                     RectifierFunction,
                     SteepestDescent<>,
                     BinaryClassificationLayer<>,
@@ -348,7 +348,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
     (input, labels, input, labels, 10, 10, randInit);
 
   // Vanilla neural net with softsign activation function.
-  CompareVanillaNetworks<RandomInitialization<>,
+  CompareVanillaNetworks<RandomInitialization,
                     SoftsignFunction,
                     SteepestDescent<>,
                     BinaryClassificationLayer<>,
@@ -356,7 +356,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
     (input, labels, input, labels, 10, 10, randInit);
 
   // Vanilla neural net with tanh activation function.
-  CompareVanillaNetworks<RandomInitialization<>,
+  CompareVanillaNetworks<RandomInitialization,
                     TanhFunction,
                     SteepestDescent<>,
                     BinaryClassificationLayer<>,
@@ -585,7 +585,7 @@ void ReberGrammarTestNetwork(HiddenLayerType& hiddenLayer0,
   SteepestDescent< > conOptimizer3(hiddenLayer0.OutputSize(),
       hiddenLayer1.InputSize(), 0.1);
 
-  NguyenWidrowInitialization<> randInit;
+  NguyenWidrowInitialization randInit;
 
   FullConnection<
       decltype(inputLayer),
@@ -820,7 +820,7 @@ void DistractedSequenceRecallTestNetwork(HiddenLayerType& hiddenLayer0)
   SteepestDescent< > conOptimizer3(hiddenLayer0.OutputSize(),
       hiddenLayer1.InputSize(), 0.1);
 
-  NguyenWidrowInitialization<> randInit;
+  NguyenWidrowInitialization randInit;
 
   FullConnection<
       decltype(inputLayer),


