[mlpack-git] master: Add test case for the layer input size and network input size function. (f941837)

gitdub at mlpack.org gitdub at mlpack.org
Sat Apr 9 07:31:20 EDT 2016


Repository : https://github.com/mlpack/mlpack
On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/ba826b1959a3f83532e91765b2bba0705e588d39...f4b3464fce6bdc7c61d94f6b22bc71fe61276328

>---------------------------------------------------------------

commit f941837e68e8559af81a869182ea38c5c91a3cdd
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Fri Apr 8 15:51:35 2016 +0200

    Add test case for the layer input size and network input size function.


>---------------------------------------------------------------

f941837e68e8559af81a869182ea38c5c91a3cdd
 src/mlpack/tests/network_util_test.cpp | 75 ++++++++++++++++++++++++++--------
 1 file changed, 57 insertions(+), 18 deletions(-)

diff --git a/src/mlpack/tests/network_util_test.cpp b/src/mlpack/tests/network_util_test.cpp
index 05be51f..9e4d3f9 100644
--- a/src/mlpack/tests/network_util_test.cpp
+++ b/src/mlpack/tests/network_util_test.cpp
@@ -23,22 +23,22 @@ BOOST_AUTO_TEST_SUITE(NetworkUtilTest);
  */
 BOOST_AUTO_TEST_CASE(NetworkSizeTest)
 {
-  // // Create a two layer network without weights.
-  // BaseLayer<> baseLayer1;
-  // BaseLayer<> baseLayer2;
-  // auto noneWeightNetwork = std::tie(baseLayer1, baseLayer2);
+  // Create a two layer network without weights.
+  BaseLayer<> baseLayer1;
+  BaseLayer<> baseLayer2;
+  auto noneWeightNetwork = std::tie(baseLayer1, baseLayer2);
 
-  // BOOST_REQUIRE_EQUAL(NetworkSize(noneWeightNetwork), 0);
+  BOOST_REQUIRE_EQUAL(NetworkSize(noneWeightNetwork), 0);
 
-  // // Create a two layer network.
-  // LinearLayer<> linearLayer1(10, 10);
-  // LinearLayer<> linearLayer2(10, 100);
+  // Create a two layer network.
+  LinearLayer<> linearLayer1(10, 10);
+  LinearLayer<> linearLayer2(10, 100);
 
-  // // Reuse the layer form the first network.
-  // auto weightNetwork = std::tie(linearLayer1, baseLayer1, linearLayer2,
-  //     baseLayer2);
+  // Reuse the layer from the first network.
+  auto weightNetwork = std::tie(linearLayer1, baseLayer1, linearLayer2,
+      baseLayer2);
 
-  // BOOST_REQUIRE_EQUAL(NetworkSize(weightNetwork), 1100); 
+  BOOST_REQUIRE_EQUAL(NetworkSize(weightNetwork), 1100);
 }
 
 /**
@@ -46,14 +46,53 @@ BOOST_AUTO_TEST_CASE(NetworkSizeTest)
  */
 BOOST_AUTO_TEST_CASE(LayerSizeTest)
 {
-  // // Create layer without weights.
-  // BaseLayer<> baseLayer;
-  // BOOST_REQUIRE_EQUAL(LayerSize(baseLayer, baseLayer.OutputParameter()), 0);
+  // Create layer without weights.
+  BaseLayer<> baseLayer;
+  BOOST_REQUIRE_EQUAL(LayerSize(baseLayer, baseLayer.OutputParameter()), 0);
 
-  // LinearLayer<> linearLayer(10, 10);
-  // BOOST_REQUIRE_EQUAL(LayerSize(linearLayer,
-  //     linearLayer.OutputParameter()), 100);
+  // Create layer with weights.
+  LinearLayer<> linearLayer(10, 10);
+  BOOST_REQUIRE_EQUAL(LayerSize(linearLayer,
+      linearLayer.OutputParameter()), 100);
+}
+
+/**
+ * Test the network input size auxiliary function.
+ */
+BOOST_AUTO_TEST_CASE(NetworkInputSizeTest)
+{
+  // Create a two layer network without weights.
+  BaseLayer<> baseLayer1;
+  BaseLayer<> baseLayer2;
+  auto noneWeightNetwork = std::tie(baseLayer1, baseLayer2);
+
+  BOOST_REQUIRE_EQUAL(NetworkInputSize(noneWeightNetwork), 0);
+
+  // Create a two layer network.
+  LinearLayer<> linearLayer1(5, 10);
+  LinearLayer<> linearLayer2(10, 100);
+
+  // Reuse the layer from the first network.
+  auto weightNetwork = std::tie(linearLayer1, baseLayer1, linearLayer2,
+      baseLayer2);
+
+  BOOST_REQUIRE_EQUAL(NetworkInputSize(weightNetwork), 5);
+}
+
+/**
+ * Test the layer input size auxiliary function.
+ */
+BOOST_AUTO_TEST_CASE(LayerInputSizeTest)
+{
+  // Create layer without weights.
+  BaseLayer<> baseLayer;
+  BOOST_REQUIRE_EQUAL(LayerInputSize(baseLayer,
+    baseLayer.OutputParameter()), 0);
 
+  // Create layer with weights.
+  LinearLayer<> linearLayer(5, 10);
+  BOOST_REQUIRE_EQUAL(LayerInputSize(linearLayer,
+      linearLayer.OutputParameter()), 5);
 }
 
 BOOST_AUTO_TEST_SUITE_END();




More information about the mlpack-git mailing list