[mlpack-git] master: Use static weights for the network decreasing error test. (f5893d5)

gitdub at big.cc.gt.atl.ga.us
Fri Sep 4 09:26:43 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/ef290321115f6dfd21522ba7ccec5f08b52d7631...f5893d5d190d5f5b4b6dc94e2593f50c56d406e4

>---------------------------------------------------------------

commit f5893d5d190d5f5b4b6dc94e2593f50c56d406e4
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Fri Sep 4 15:26:35 2015 +0200

    Use static weights for the network decreasing error test.

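    [Editor's note: "static weights" here means the random initializer is
    given a degenerate range. RandomInitialization(0.5, 0.5) has equal lower
    and upper bounds, so every weight starts at exactly 0.5 and each run of
    the test begins from the same point. A minimal sketch of that collapse,
    using plain Armadillo rather than mlpack's initializer:

    #include <armadillo>

    int main()
    {
      const double lower = 0.5, upper = 0.5;

      // Draw "random" weights in [lower, upper]; with equal bounds the
      // scaled uniform draw collapses to a single constant.
      arma::mat weights = lower + arma::randu<arma::mat>(4, 3) *
          (upper - lower);

      weights.print("static weights:");  // every entry is exactly 0.5
      return 0;
    }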

>---------------------------------------------------------------

f5893d5d190d5f5b4b6dc94e2593f50c56d406e4
 src/mlpack/tests/feedforward_network_test.cpp | 22 +++++++++++++---------
 1 file changed, 13 insertions(+), 9 deletions(-)

diff --git a/src/mlpack/tests/feedforward_network_test.cpp b/src/mlpack/tests/feedforward_network_test.cpp
index 33b083b..f8b964e 100644
--- a/src/mlpack/tests/feedforward_network_test.cpp
+++ b/src/mlpack/tests/feedforward_network_test.cpp
@@ -337,7 +337,6 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
  * evaluate the network.
  */
 template<
-    typename WeightInitRule,
     typename PerformanceFunction,
     typename OutputLayerType,
     typename PerformanceFunctionType,
@@ -372,12 +371,18 @@ void BuildNetworkOptimzer(MatType& trainData,
    * +-----+       +-----+
    */
 
-  LinearLayer<> inputLayer(trainData.n_rows, hiddenLayerSize);
-  BiasLayer<> inputBiasLayer(hiddenLayerSize);
+  RandomInitialization randInit(0.5, 0.5);
+
+  LinearLayer<RMSPROP, RandomInitialization> inputLayer(trainData.n_rows,
+      hiddenLayerSize, randInit);
+  BiasLayer<RMSPROP, RandomInitialization> inputBiasLayer(hiddenLayerSize,
+      1, randInit);
   BaseLayer<PerformanceFunction> inputBaseLayer;
 
-  LinearLayer<> hiddenLayer1(hiddenLayerSize, trainLabels.n_rows);
-  BiasLayer<> hiddenBiasLayer1(trainLabels.n_rows);
+  LinearLayer<RMSPROP, RandomInitialization> hiddenLayer1(hiddenLayerSize,
+      trainLabels.n_rows, randInit);
+  BiasLayer<RMSPROP, RandomInitialization> hiddenBiasLayer1(trainLabels.n_rows,
+      1, randInit);
   BaseLayer<PerformanceFunction> outputLayer;
 
   OutputLayerType classOutputLayer;
@@ -388,7 +393,7 @@ void BuildNetworkOptimzer(MatType& trainData,
   FFN<decltype(modules), OutputLayerType, PerformanceFunctionType>
       net(modules, classOutputLayer);
 
-  Trainer<decltype(net)> trainer(net, epochs, 1);
+  Trainer<decltype(net)> trainer(net, epochs, 1, 0.0001, false);
 
   double error = DBL_MAX;
   for (size_t i = 0; i < 5; i++)
@@ -420,11 +425,10 @@ BOOST_AUTO_TEST_CASE(NetworkDecreasingErrorTest)
   labels.submat(0, labels.n_cols / 2, 0, labels.n_cols - 1) += 1;
 
   // Vanilla neural net with logistic activation function.
-  BuildNetworkOptimzer<RandomInitialization,
-                       LogisticFunction,
+  BuildNetworkOptimzer<LogisticFunction,
                        BinaryClassificationLayer,
                        MeanSquaredErrorFunction>
-      (dataset, labels, dataset, labels, 30, 50);
+      (dataset, labels, dataset, labels, 20, 15);
 }
 
 BOOST_AUTO_TEST_SUITE_END();
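[Editor's note: the Trainer is also now constructed with two extra
arguments, presumably a convergence tolerance (0.0001) and a shuffle flag
(false); with shuffling off and constant initial weights, every run visits
the samples in the same order, so the per-epoch error is reproducible and
the test can assert that it never increases. A self-contained sketch of
that decreasing-error check; TrainOnce is a stand-in for a call to
trainer.Train(...), not mlpack API:

#include <cassert>
#include <cfloat>
#include <cstddef>

// Placeholder for one training pass; the error shrinks each round so
// the example runs on its own.
double TrainOnce(const size_t round)
{
  return 1.0 / static_cast<double>(round + 1);
}

int main()
{
  double error = DBL_MAX;
  for (size_t i = 0; i < 5; ++i)
  {
    const double currentError = TrainOnce(i);
    assert(currentError <= error);  // the error must never increase
    error = currentError;
  }
  return 0;
}
]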


