[mlpack-git] master: Add test cases for the layer and network weights functions that use a given initialization rule to initialize the weights. (07289ba)
gitdub at mlpack.org
Sat Apr 9 07:31:20 EDT 2016
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/ba826b1959a3f83532e91765b2bba0705e588d39...f4b3464fce6bdc7c61d94f6b22bc71fe61276328
>---------------------------------------------------------------
commit 07289ba8d6ae0c4c0eceee0f9aae1ea7dd7bbfde
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date: Fri Apr 8 17:13:18 2016 +0200
Add test cases for the layer and network weights functions that use a given initialization rule to initialize the weights.
>---------------------------------------------------------------
07289ba8d6ae0c4c0eceee0f9aae1ea7dd7bbfde
src/mlpack/tests/network_util_test.cpp | 48 +++++++++++++++++++++++++++++++++-
1 file changed, 47 insertions(+), 1 deletion(-)
diff --git a/src/mlpack/tests/network_util_test.cpp b/src/mlpack/tests/network_util_test.cpp
index 9e4d3f9..766ed85 100644
--- a/src/mlpack/tests/network_util_test.cpp
+++ b/src/mlpack/tests/network_util_test.cpp
@@ -1,6 +1,6 @@
/**
* @file network_util_test.cpp
- * @author Marcus edel
+ * @author Marcus Edel
*
* Simple tests for things in the network_util file.
*/
@@ -9,6 +9,7 @@
#include <mlpack/methods/ann/network_util.hpp>
#include <mlpack/methods/ann/layer/linear_layer.hpp>
#include <mlpack/methods/ann/layer/base_layer.hpp>
+#include <mlpack/methods/ann/init_rules/random_init.hpp>
#include <boost/test/unit_test.hpp>
#include "old_boost_test_definitions.hpp"
@@ -95,4 +96,49 @@ BOOST_AUTO_TEST_CASE(LayerInputSizeTest)
linearLayer.OutputParameter()), 5);
}
+/**
+ * Test the network weight auxiliary function using the given initialization
+ * rule.
+ */
+BOOST_AUTO_TEST_CASE(NetworkWeightsInitTest)
+{
+ // Create a two layer network.
+ LinearLayer<> linearLayer1(10, 10);
+ LinearLayer<> linearLayer2(10, 100);
+
+ arma::mat parameter = arma::zeros<arma::mat>(1100, 1);
+
+ // Create the network.
+ auto network = std::tie(linearLayer1, linearLayer2);
+
+ BOOST_REQUIRE_EQUAL(arma::accu(parameter), 0);
+
+ RandomInitialization constantInit(1, 1);
+ NetworkWeights(constantInit, parameter, network);
+
+ BOOST_REQUIRE_EQUAL(arma::accu(linearLayer1.Weights()), 100);
+ BOOST_REQUIRE_EQUAL(arma::accu(linearLayer2.Weights()), 1000);
+ BOOST_REQUIRE_EQUAL(arma::accu(parameter), 1100);
+}
+
+/**
+ * Test the layer weight auxiliary function using the given initialization rule.
+ */
+BOOST_AUTO_TEST_CASE(LayerWeightsInitTest)
+{
+ // Create a two layer network.
+ LinearLayer<> linearLayer1(10, 10);
+
+ arma::mat parameter = arma::zeros<arma::mat>(100, 1);
+
+ BOOST_REQUIRE_EQUAL(arma::accu(parameter), 0);
+
+ RandomInitialization constantInit(1, 1);
+ arma::mat output;
+ LayerWeights(constantInit, linearLayer1, parameter, 0, output);
+
+ BOOST_REQUIRE_EQUAL(arma::accu(linearLayer1.Weights()), 100);
+ BOOST_REQUIRE_EQUAL(arma::accu(parameter), 100);
+}
+
BOOST_AUTO_TEST_SUITE_END();
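
For context, both helpers under test follow the same idea: the network is a std::tuple of layers, and each layer's weight matrix is initialized by the given rule while an element offset advances through the shared parameter matrix. The standalone sketch below illustrates that idea only; SimpleLayer, ConstantInitialization, InitLayerWeights, and InitNetworkWeights are hypothetical names for illustration, not the mlpack API in network_util.hpp.

#include <armadillo>
#include <cassert>
#include <tuple>

// Hypothetical stand-in for a layer owning a weight matrix; the real
// LinearLayer stores additional state.
struct SimpleLayer
{
  SimpleLayer(const size_t inSize, const size_t outSize)
      : weights(outSize, inSize) { }

  arma::mat& Weights() { return weights; }

  arma::mat weights;
};

// Hypothetical constant-fill rule; RandomInitialization(1, 1) in the tests
// behaves the same way, since its lower and upper bounds coincide at 1.
struct ConstantInitialization
{
  void Initialize(arma::mat& w, const size_t rows, const size_t cols)
  {
    w.ones(rows, cols);
  }
};

// Initialize one layer's weights and copy them into the flat parameter
// matrix at the given element offset; returns the number of elements
// consumed so the caller can advance the offset.
template<typename InitRule, typename LayerType>
size_t InitLayerWeights(InitRule& initRule,
                        LayerType& layer,
                        arma::mat& parameter,
                        const size_t offset)
{
  arma::mat& w = layer.Weights();
  initRule.Initialize(w, w.n_rows, w.n_cols);
  parameter.rows(offset, offset + w.n_elem - 1) = arma::vectorise(w);
  return w.n_elem;
}

// Walk every layer in the tuple, initializing one slice per layer.
template<typename InitRule, typename TupleType>
void InitNetworkWeights(InitRule& initRule,
                        arma::mat& parameter,
                        TupleType& network)
{
  size_t offset = 0;
  std::apply([&](auto&... layer)
  {
    ((offset += InitLayerWeights(initRule, layer, parameter, offset)), ...);
  }, network);
}

int main()
{
  // Mirror the NetworkWeightsInitTest setup: 10x10 and 100x10 weight
  // matrices, so the shared parameter matrix holds 100 + 1000 = 1100
  // elements.
  SimpleLayer linearLayer1(10, 10), linearLayer2(10, 100);
  arma::mat parameter = arma::zeros<arma::mat>(1100, 1);

  auto network = std::tie(linearLayer1, linearLayer2);
  ConstantInitialization init;
  InitNetworkWeights(init, parameter, network);

  assert(arma::accu(parameter) == 1100);
  return 0;
}

The sketch needs C++17 for std::apply; the mlpack code of this era predates that standard and walks the tuple with recursive templates instead. Note also that RandomInitialization(1, 1) in the tests is effectively a constant initializer, since the lower and upper bounds of the uniform draw coincide at 1, which is what makes the accumulated sums (100, 1000, and 1100) exactly predictable.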