[mlpack-git] master: Refactor feedforward network test for new network API. (da50cb5)

gitdub at big.cc.gt.atl.ga.us
Sat Aug 29 08:23:21 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/ea45ace1ff744390a4c35183528eda881eda5c61...fd336238de224ed72fc23b84e1e2f02ae3c879d6

>---------------------------------------------------------------

commit da50cb51fbf588fc1cd03cb9107a2388b1c1fabc
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Tue Aug 18 14:45:51 2015 +0200

    Refactor feedforward network test for new network API.


>---------------------------------------------------------------

da50cb51fbf588fc1cd03cb9107a2388b1c1fabc
 src/mlpack/tests/feedforward_network_test.cpp | 154 +++++++++-----------------
 1 file changed, 51 insertions(+), 103 deletions(-)

diff --git a/src/mlpack/tests/feedforward_network_test.cpp b/src/mlpack/tests/feedforward_network_test.cpp
index 6fdbd01..d80f915 100644
--- a/src/mlpack/tests/feedforward_network_test.cpp
+++ b/src/mlpack/tests/feedforward_network_test.cpp
@@ -11,22 +11,15 @@
 
 #include <mlpack/methods/ann/init_rules/random_init.hpp>
 
-#include <mlpack/methods/ann/layer/neuron_layer.hpp>
 #include <mlpack/methods/ann/layer/bias_layer.hpp>
+#include <mlpack/methods/ann/layer/linear_layer.hpp>
+#include <mlpack/methods/ann/layer/base_layer.hpp>
 #include <mlpack/methods/ann/layer/dropout_layer.hpp>
 #include <mlpack/methods/ann/layer/binary_classification_layer.hpp>
 
-#include <mlpack/methods/ann/connections/full_connection.hpp>
-#include <mlpack/methods/ann/connections/identity_connection.hpp>
-
 #include <mlpack/methods/ann/trainer/trainer.hpp>
-
-#include <mlpack/methods/ann/ffnn.hpp>
-
+#include <mlpack/methods/ann/ffn.hpp>
 #include <mlpack/methods/ann/performance_functions/mse_function.hpp>
-#include <mlpack/methods/ann/performance_functions/sse_function.hpp>
-#include <mlpack/methods/ann/performance_functions/cee_function.hpp>
-
 #include <mlpack/methods/ann/optimizer/rmsprop.hpp>
 
 #include <boost/test/unit_test.hpp>
@@ -78,35 +71,23 @@ void BuildVanillaNetwork(MatType& trainData,
    * |     |
    * +-----+
    */
-  BiasLayer<> biasLayer0(1);
 
-  NeuronLayer<PerformanceFunction> inputLayer(trainData.n_rows);
-  NeuronLayer<PerformanceFunction> hiddenLayer0(hiddenLayerSize);
-  NeuronLayer<PerformanceFunction> hiddenLayer1(trainLabels.n_rows);
+  LinearLayer<> inputLayer(trainData.n_rows, hiddenLayerSize);
+  BiasLayer<> biasLayer(hiddenLayerSize, hiddenLayerSize);
+  BaseLayer<PerformanceFunction> hiddenLayer0(trainData.n_rows,
+      hiddenLayerSize);
 
-  OutputLayerType outputLayer;
+  LinearLayer<> hiddenLayer1(hiddenLayerSize, trainLabels.n_rows);
+  BaseLayer<PerformanceFunction> outputLayer(hiddenLayerSize,
+      trainLabels.n_rows);
 
-  FullConnection<
-    decltype(inputLayer),
-    decltype(hiddenLayer0)>
-    layerCon0(inputLayer, hiddenLayer0);
+  OutputLayerType classOutputLayer;
 
-  FullConnection<
-    decltype(biasLayer0),
-    decltype(hiddenLayer0)>
-    layerCon1(biasLayer0, hiddenLayer0);
+  auto modules = std::tie(inputLayer, biasLayer, hiddenLayer0, hiddenLayer1,
+      outputLayer);
 
-  FullConnection<
-      decltype(hiddenLayer0),
-      decltype(hiddenLayer1)>
-      layerCon2(hiddenLayer0, hiddenLayer1);
-
-  auto module0 = std::tie(layerCon0, layerCon1);
-  auto module1 = std::tie(layerCon2);
-  auto modules = std::tie(module0, module1);
-
-  FFNN<decltype(modules), decltype(outputLayer), PerformanceFunctionType>
-      net(modules, outputLayer);
+  FFN<decltype(modules), decltype(classOutputLayer), PerformanceFunctionType>
+      net(modules, classOutputLayer);
 
   Trainer<decltype(net)> trainer(net, maxEpochs, 1, 0.001);
   trainer.Train(trainData, trainLabels, testData, testLabels);
@@ -117,7 +98,7 @@ void BuildVanillaNetwork(MatType& trainData,
   for (size_t i = 0; i < testData.n_cols; i++)
   {
     net.Predict(testData.unsafe_col(i), prediction);
-    if (arma::sum(prediction - testLabels.unsafe_col(i)) == 0)
+    if (arma::sum(arma::abs(prediction - testLabels.unsafe_col(i))) == 0)
       error++;
   }
 
@@ -218,44 +199,25 @@ void BuildDropoutNetwork(MatType& trainData,
    * |     |
    * +-----+
    */
-  BiasLayer<> biasLayer0(1);
-
-  NeuronLayer<PerformanceFunction> inputLayer(trainData.n_rows);
-  NeuronLayer<PerformanceFunction> hiddenLayer0(hiddenLayerSize);
-  DropoutLayer<> dropoutLayer0(hiddenLayerSize);
-  NeuronLayer<PerformanceFunction> hiddenLayer1(trainLabels.n_rows);
-
-  OutputLayerType outputLayer;
 
-  FullConnection<
-    decltype(inputLayer),
-    decltype(hiddenLayer0)>
-    layerCon0(inputLayer, hiddenLayer0);
+  LinearLayer<> inputLayer(trainData.n_rows, hiddenLayerSize);
+  BiasLayer<> biasLayer(hiddenLayerSize, hiddenLayerSize);
+  BaseLayer<PerformanceFunction> hiddenLayer0(trainData.n_rows,
+      hiddenLayerSize);
 
-  FullConnection<
-    decltype(biasLayer0),
-    decltype(hiddenLayer0)>
-    layerCon1(biasLayer0, hiddenLayer0);
+  DropoutLayer<> dropoutLayer0(hiddenLayerSize, hiddenLayerSize);
 
-  IdentityConnection<
-    decltype(hiddenLayer0),
-    decltype(dropoutLayer0),
-    mlpack::ann::RMSPROP,
-    arma::colvec>
-    layerCon1Dropout(hiddenLayer0, dropoutLayer0);
+  LinearLayer<> hiddenLayer1(hiddenLayerSize, trainLabels.n_rows);
+  BaseLayer<PerformanceFunction> outputLayer(hiddenLayerSize,
+      trainLabels.n_rows);
 
-  FullConnection<
-      decltype(dropoutLayer0),
-      decltype(hiddenLayer1)>
-      layerCon2(dropoutLayer0, hiddenLayer1);
+  OutputLayerType classOutputLayer;
 
-  auto module0 = std::tie(layerCon0, layerCon1);
-  auto module0Dropout = std::tie(layerCon1Dropout);
-  auto module1 = std::tie(layerCon2);
-  auto modules = std::tie(module0, module0Dropout, module1);
+  auto modules = std::tie(inputLayer, biasLayer, hiddenLayer0, dropoutLayer0,
+      hiddenLayer1, outputLayer);
 
-  FFNN<decltype(modules), decltype(outputLayer), PerformanceFunctionType>
-      net(modules, outputLayer);
+  FFN<decltype(modules), decltype(classOutputLayer), PerformanceFunctionType>
+      net(modules, classOutputLayer);
 
   Trainer<decltype(net)> trainer(net, maxEpochs, 1, 0.001);
   trainer.Train(trainData, trainLabels, testData, testLabels);
@@ -266,7 +228,7 @@ void BuildDropoutNetwork(MatType& trainData,
   for (size_t i = 0; i < testData.n_cols; i++)
   {
     net.Predict(testData.unsafe_col(i), prediction);
-    if (arma::sum(prediction - testLabels.unsafe_col(i)) == 0)
+    if (arma::sum(arma::abs(prediction - testLabels.unsafe_col(i))) == 0)
       error++;
   }
 
@@ -409,41 +371,27 @@ void BuildNetworkOptimzer(MatType& trainData,
    * |     |
    * +-----+
    */
-  BiasLayer<> biasLayer0(1);
-
-  NeuronLayer<PerformanceFunction> inputLayer(trainData.n_rows);
-  NeuronLayer<PerformanceFunction> hiddenLayer0(hiddenLayerSize);
-  NeuronLayer<PerformanceFunction> hiddenLayer1(trainLabels.n_rows);
-
-  OutputLayerType outputLayer;
-
-  FullConnection<
-    decltype(inputLayer),
-    decltype(hiddenLayer0),
-    mlpack::ann::RMSPROP,
-    decltype(weightInitRule)>
-    layerCon0(inputLayer, hiddenLayer0, weightInitRule);
-
-  FullConnection<
-    decltype(biasLayer0),
-    decltype(hiddenLayer0),
-    mlpack::ann::RMSPROP,
-    decltype(weightInitRule)>
-    layerCon1(biasLayer0, hiddenLayer0, weightInitRule);
-
-  FullConnection<
-      decltype(hiddenLayer0),
-      decltype(hiddenLayer1),
-      mlpack::ann::RMSPROP,
-      decltype(weightInitRule)>
-      layerCon2(hiddenLayer0, hiddenLayer1, weightInitRule);
-
-  auto module0 = std::tie(layerCon0, layerCon1);
-  auto module1 = std::tie(layerCon2);
-  auto modules = std::tie(module0, module1);
-
-  FFNN<decltype(modules), decltype(outputLayer), PerformanceFunctionType>
-      net(modules, outputLayer);
+
+  LinearLayer<mlpack::ann::RMSPROP, WeightInitRule> inputLayer(
+      trainData.n_rows, hiddenLayerSize, weightInitRule);
+
+  BiasLayer<> biasLayer(hiddenLayerSize, hiddenLayerSize);
+  BaseLayer<PerformanceFunction> hiddenLayer0(trainData.n_rows,
+      hiddenLayerSize);
+
+  LinearLayer<mlpack::ann::RMSPROP, WeightInitRule> hiddenLayer1(
+      hiddenLayerSize, trainLabels.n_rows, weightInitRule);
+
+  BaseLayer<PerformanceFunction> outputLayer(hiddenLayerSize,
+      trainLabels.n_rows);
+
+  OutputLayerType classOutputLayer;
+
+  auto modules = std::tie(inputLayer, biasLayer, hiddenLayer0, hiddenLayer1,
+      outputLayer);
+
+  FFN<decltype(modules), OutputLayerType, PerformanceFunctionType>
+      net(modules, classOutputLayer);
 
   Trainer<decltype(net)> trainer(net, epochs, 1);
 


