[mlpack-git] mlpack-2.x: Remove code that should not be released. (909d69e)

gitdub at mlpack.org
Thu Jun 16 09:54:59 EDT 2016


Repository : https://github.com/mlpack/mlpack
On branch  : mlpack-2.x
Link       : https://github.com/mlpack/mlpack/compare/281fb8ac87e6bd6b0f7b12c66cc58dba98c49e46...909d69e4bff15bb4db0b3d05739f6963123e346a

>---------------------------------------------------------------

commit 909d69e4bff15bb4db0b3d05739f6963123e346a
Author: Ryan Curtin <ryan at ratml.org>
Date:   Thu Jun 16 06:54:59 2016 -0700

    Remove code that should not be released.


>---------------------------------------------------------------

909d69e4bff15bb4db0b3d05739f6963123e346a
 src/mlpack/methods/CMakeLists.txt      |   1 -
 src/mlpack/tests/CMakeLists.txt        |   4 -
 src/mlpack/tests/network_util_test.cpp | 144 ---------------------------------
 3 files changed, 149 deletions(-)
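
For context, the removed network_util.hpp helpers computed properties of a
network expressed as a std::tuple of layers, as exercised by the deleted tests
below. The following is a minimal standalone sketch of that tuple-recursion
technique, written for this post rather than taken from mlpack: MiniLinear,
MiniBase, and this NetworkSize() are hypothetical stand-ins for the real layer
types and helper.

// Minimal standalone sketch of the tuple-recursion technique behind the
// removed NetworkSize() helper.  MiniLinear, MiniBase, and this
// NetworkSize() are hypothetical stand-ins, not mlpack's real types.
#include <cstddef>
#include <iostream>
#include <tuple>
#include <type_traits>

// A layer holding an inSize x outSize weight matrix.
struct MiniLinear
{
  MiniLinear(std::size_t inSize, std::size_t outSize) :
      in(inSize), out(outSize) { }
  std::size_t WeightCount() const { return in * out; }
  std::size_t in, out;
};

// An activation-style layer with no weights.
struct MiniBase
{
  std::size_t WeightCount() const { return 0; }
};

// Base case: past the last layer, nothing left to count.
template<std::size_t I = 0, typename... Layers>
typename std::enable_if<I == sizeof...(Layers), std::size_t>::type
NetworkSize(const std::tuple<Layers...>& /* network */)
{
  return 0;
}

// Recursive case: count this layer's weights, then recurse on the rest.
template<std::size_t I = 0, typename... Layers>
typename std::enable_if<I < sizeof...(Layers), std::size_t>::type
NetworkSize(const std::tuple<Layers...>& network)
{
  return std::get<I>(network).WeightCount() +
      NetworkSize<I + 1, Layers...>(network);
}

int main()
{
  MiniLinear linearLayer1(10, 10), linearLayer2(10, 100);
  MiniBase baseLayer1, baseLayer2;

  auto network = std::tie(linearLayer1, baseLayer1, linearLayer2, baseLayer2);

  // 10 * 10 + 0 + 10 * 100 + 0 = 1100, the value the removed test asserted.
  std::cout << NetworkSize(network) << std::endl;
  return 0;
}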

diff --git a/src/mlpack/methods/CMakeLists.txt b/src/mlpack/methods/CMakeLists.txt
index 9b6ee0d..9d1fbf4 100644
--- a/src/mlpack/methods/CMakeLists.txt
+++ b/src/mlpack/methods/CMakeLists.txt
@@ -45,7 +45,6 @@ set(DIRS
   radical
   range_search
   rann
-  rmva
   regularized_svd
   softmax_regression
   sparse_autoencoder
diff --git a/src/mlpack/tests/CMakeLists.txt b/src/mlpack/tests/CMakeLists.txt
index 37dd752..1b97640 100644
--- a/src/mlpack/tests/CMakeLists.txt
+++ b/src/mlpack/tests/CMakeLists.txt
@@ -2,8 +2,6 @@
 add_executable(mlpack_test
   mlpack_test.cpp
   adaboost_test.cpp
-  adam_test.cpp
-  ada_delta_test.cpp
   allkrann_search_test.cpp
   arma_extend_test.cpp
   aug_lagrangian_test.cpp
@@ -43,7 +41,6 @@ add_executable(mlpack_test
   minibatch_sgd_test.cpp
   nbc_test.cpp
   nca_test.cpp
-  network_util_test.cpp
   nmf_test.cpp
   pca_test.cpp
   perceptron_test.cpp
@@ -71,7 +68,6 @@ add_executable(mlpack_test
   svd_incremental_test.cpp
   nystroem_method_test.cpp
   armadillo_svd_test.cpp
-  recurrent_network_test.cpp
 )
 # Link dependencies of test executable.
 target_link_libraries(mlpack_test
diff --git a/src/mlpack/tests/network_util_test.cpp b/src/mlpack/tests/network_util_test.cpp
deleted file mode 100644
index 766ed85..0000000
--- a/src/mlpack/tests/network_util_test.cpp
+++ /dev/null
@@ -1,144 +0,0 @@
-/**
- * @file network_util_test.cpp
- * @author Marcus Edel
- *
- * Simple tests for things in the network_util file.
- */
-#include <mlpack/core.hpp>
-
-#include <mlpack/methods/ann/network_util.hpp>
-#include <mlpack/methods/ann/layer/linear_layer.hpp>
-#include <mlpack/methods/ann/layer/base_layer.hpp>
-#include <mlpack/methods/ann/init_rules/random_init.hpp>
-
-#include <boost/test/unit_test.hpp>
-#include "old_boost_test_definitions.hpp"
-
-using namespace mlpack;
-using namespace mlpack::ann;
-
-BOOST_AUTO_TEST_SUITE(NetworkUtilTest);
-
-/**
- * Test the network size auxiliary function.
- */
-BOOST_AUTO_TEST_CASE(NetworkSizeTest)
-{
-  // Create a two layer network without weights.
-  BaseLayer<> baseLayer1;
-  BaseLayer<> baseLayer2;
-  auto noneWeightNetwork = std::tie(baseLayer1, baseLayer2);
-
-  BOOST_REQUIRE_EQUAL(NetworkSize(noneWeightNetwork), 0);
-
-  // Create a two layer network.
-  LinearLayer<> linearLayer1(10, 10);
-  LinearLayer<> linearLayer2(10, 100);
-
-  // Reuse the layers from the first network.
-  auto weightNetwork = std::tie(linearLayer1, baseLayer1, linearLayer2,
-      baseLayer2);
-
-  BOOST_REQUIRE_EQUAL(NetworkSize(weightNetwork), 1100);
-}
-
-/**
- * Test the layer size auxiliary function.
- */
-BOOST_AUTO_TEST_CASE(LayerSizeTest)
-{
-  // Create layer without weights.
-  BaseLayer<> baseLayer;
-  BOOST_REQUIRE_EQUAL(LayerSize(baseLayer, baseLayer.OutputParameter()), 0);
-
-  // Create layer with weights.
-  LinearLayer<> linearLayer(10, 10);
-  BOOST_REQUIRE_EQUAL(LayerSize(linearLayer,
-      linearLayer.OutputParameter()), 100);
-}
-
-/**
- * Test the network input size auxiliary function.
- */
-BOOST_AUTO_TEST_CASE(NetworkInputSizeTest)
-{
-  // Create a two layer network without weights.
-  BaseLayer<> baseLayer1;
-  BaseLayer<> baseLayer2;
-  auto noneWeightNetwork = std::tie(baseLayer1, baseLayer2);
-
-  BOOST_REQUIRE_EQUAL(NetworkInputSize(noneWeightNetwork), 0);
-
-  // Create a two layer network.
-  LinearLayer<> linearLayer1(5, 10);
-  LinearLayer<> linearLayer2(10, 100);
-
-  // Reuse the layers from the first network.
-  auto weightNetwork = std::tie(linearLayer1, baseLayer1, linearLayer2,
-      baseLayer2);
-
-  BOOST_REQUIRE_EQUAL(NetworkInputSize(weightNetwork), 5);
-}
-
-/**
- * Test the layer input size auxiliary function.
- */
-BOOST_AUTO_TEST_CASE(LayerInputSizeTest)
-{
-  // Create layer without weights.
-  BaseLayer<> baseLayer;
-  BOOST_REQUIRE_EQUAL(LayerInputSize(baseLayer,
-    baseLayer.OutputParameter()), 0);
-
-  // Create layer with weights.
-  LinearLayer<> linearLayer(5, 10);
-  BOOST_REQUIRE_EQUAL(LayerInputSize(linearLayer,
-      linearLayer.OutputParameter()), 5);
-}
-
-/**
- * Test the network weight auxiliary function using the given initialization
- * rule.
- */
-BOOST_AUTO_TEST_CASE(NetworkWeightsInitTest)
-{
-  // Create a two layer network.
-  LinearLayer<> linearLayer1(10, 10);
-  LinearLayer<> linearLayer2(10, 100);
-
-  arma::mat parameter = arma::zeros<arma::mat>(1100, 1);
-
-  // Create the network.
-  auto network = std::tie(linearLayer1, linearLayer2);
-
-  BOOST_REQUIRE_EQUAL(arma::accu(parameter), 0);
-
-  RandomInitialization constantInit(1, 1);
-  NetworkWeights(constantInit, parameter, network);
-
-  BOOST_REQUIRE_EQUAL(arma::accu(linearLayer1.Weights()), 100);
-  BOOST_REQUIRE_EQUAL(arma::accu(linearLayer2.Weights()), 1000);
-  BOOST_REQUIRE_EQUAL(arma::accu(parameter), 1100);
-}
-
-/**
- * Test the layer weight auxiliary function using the given initialization rule.
- */
-BOOST_AUTO_TEST_CASE(LayerWeightsInitTest)
-{
-  // Create a two layer network.
-  LinearLayer<> linearLayer1(10, 10);
-
-  arma::mat parameter = arma::zeros<arma::mat>(100, 1);
-
-  BOOST_REQUIRE_EQUAL(arma::accu(parameter), 0);
-
-  RandomInitialization constantInit(1, 1);
-  arma::mat output;
-  LayerWeights(constantInit, linearLayer1, parameter, 0, output);
-
-  BOOST_REQUIRE_EQUAL(arma::accu(linearLayer1.Weights()), 100);
-  BOOST_REQUIRE_EQUAL(arma::accu(parameter), 100);
-}
-
-BOOST_AUTO_TEST_SUITE_END();
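
The NetworkWeightsInitTest and LayerWeightsInitTest cases above exercised
initializing every layer's weight matrix from one flat parameter vector. Here
is a rough Armadillo sketch of that idea, again a hypothetical stand-in rather
than the removed mlpack code: InitFlatWeights() copies slices out of the flat
vector, whereas the real helpers worked through the layer and
initialization-rule types directly.

// Rough sketch of flat-parameter initialization in the spirit of the removed
// NetworkWeights() helper.  InitFlatWeights() is a hypothetical stand-in; it
// copies slices of the flat vector instead of working through layer types.
#include <cstddef>
#include <iostream>
#include <vector>

#include <armadillo>

void InitFlatWeights(arma::mat& parameter, std::vector<arma::mat*>& weights)
{
  // Stand-in for RandomInitialization(1, 1): every element becomes 1.
  parameter.ones();

  // Hand each weight matrix the next contiguous slice of the flat vector.
  std::size_t offset = 0;
  for (arma::mat* w : weights)
  {
    *w = arma::reshape(parameter.rows(offset, offset + w->n_elem - 1),
        w->n_rows, w->n_cols);
    offset += w->n_elem;
  }
}

int main()
{
  // Two weight matrices totalling 100 + 1000 = 1100 elements, as in the test.
  arma::mat w1(10, 10), w2(10, 100);
  arma::mat parameter = arma::zeros<arma::mat>(1100, 1);

  std::vector<arma::mat*> weights{ &w1, &w2 };
  InitFlatWeights(parameter, weights);

  // Matches the removed assertions: accu(w1) == 100, accu(w2) == 1000, and
  // accu(parameter) == 1100.
  std::cout << arma::accu(w1) << " " << arma::accu(w2) << " "
      << arma::accu(parameter) << std::endl;
  return 0;
}

The flat layout is what allows an optimizer to treat all of a network's
weights as a single vector during training.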