[mlpack-git] master: Add a test for AdaDelta. (612affd)
gitdub at big.cc.gt.atl.ga.us (gitdub at big.cc.gt.atl.ga.us)
Tue Oct 20 05:41:39 EDT 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/fecf1194c123ced12d56e7daad761c7b9aaac262...67e0a132c7f62820c734eb508fe1bc83128a3e13
>---------------------------------------------------------------
commit 612affdea6294ad2948a0d5c21e422084fdfb3e8
Author: marcus <marcus.edel at fu-berlin.de>
Date: Sun Oct 18 16:17:03 2015 +0200
Add a test for AdaDelta.
>---------------------------------------------------------------
612affdea6294ad2948a0d5c21e422084fdfb3e8
src/mlpack/methods/ann/optimizer/CMakeLists.txt | 3 +++
src/mlpack/tests/CMakeLists.txt | 1 +
.../tests/{rmsprop_test.cpp => ada_delta_test.cpp} | 22 +++++++++++-----------
3 files changed, 15 insertions(+), 11 deletions(-)
diff --git a/src/mlpack/methods/ann/optimizer/CMakeLists.txt b/src/mlpack/methods/ann/optimizer/CMakeLists.txt
index f67cff1..25d4fdb 100644
--- a/src/mlpack/methods/ann/optimizer/CMakeLists.txt
+++ b/src/mlpack/methods/ann/optimizer/CMakeLists.txt
@@ -1,7 +1,10 @@
# Define the files we need to compile
# Anything not in this list will not be compiled into MLPACK.
set(SOURCES
+ ada_delta.hpp
+ adam.hpp
rmsprop.hpp
+ steepest_descent.hpp
)
# Add directory name to sources.
diff --git a/src/mlpack/tests/CMakeLists.txt b/src/mlpack/tests/CMakeLists.txt
index 43e7dd0..25bb8e2 100644
--- a/src/mlpack/tests/CMakeLists.txt
+++ b/src/mlpack/tests/CMakeLists.txt
@@ -3,6 +3,7 @@ add_executable(mlpack_test
mlpack_test.cpp
activation_functions_test.cpp
adaboost_test.cpp
+ ada_delta_test.cpp
allkfn_test.cpp
allknn_test.cpp
allkrann_search_test.cpp
diff --git a/src/mlpack/tests/rmsprop_test.cpp b/src/mlpack/tests/ada_delta_test.cpp
similarity index 82%
copy from src/mlpack/tests/rmsprop_test.cpp
copy to src/mlpack/tests/ada_delta_test.cpp
index 0ae76ba..aa78119 100644
--- a/src/mlpack/tests/rmsprop_test.cpp
+++ b/src/mlpack/tests/ada_delta_test.cpp
@@ -1,8 +1,8 @@
/**
- * @file rmsprop_test.cpp
+ * @file ada_delta_test.cpp
* @author Marcus Edel
*
- * Tests the RMSProp optimizer on a couple test models.
+ * Tests the AdaDelta optimizer on a couple test models.
*/
#include <mlpack/core.hpp>
@@ -18,7 +18,7 @@
#include <mlpack/methods/ann/trainer/trainer.hpp>
#include <mlpack/methods/ann/ffn.hpp>
#include <mlpack/methods/ann/performance_functions/mse_function.hpp>
-#include <mlpack/methods/ann/optimizer/rmsprop.hpp>
+#include <mlpack/methods/ann/optimizer/ada_delta.hpp>
#include <boost/test/unit_test.hpp>
#include "old_boost_test_definitions.hpp"
@@ -26,14 +26,14 @@
using namespace mlpack;
using namespace mlpack::ann;
-BOOST_AUTO_TEST_SUITE(RMSPropTest);
+BOOST_AUTO_TEST_SUITE(AdaDeltaTest);
/**
* Train and evaluate a vanilla network with the specified structure. Using the
* iris data, the data set contains 3 classes. One class is linearly separable
* from the other 2. The other two aren't linearly separable from each other.
*/
-BOOST_AUTO_TEST_CASE(SimpleRMSPropTestFunction)
+BOOST_AUTO_TEST_CASE(SimpleAdaDeltaTestFunction)
{
const size_t hiddenLayerSize = 10;
const size_t maxEpochs = 100;
@@ -51,15 +51,15 @@ BOOST_AUTO_TEST_CASE(SimpleRMSPropTestFunction)
// Construct a feed forward network using the specified parameters.
RandomInitialization randInit(0.5, 0.5);
- LinearLayer<RMSPROP, RandomInitialization> inputLayer(dataset.n_rows,
+ LinearLayer<AdaDelta, RandomInitialization> inputLayer(dataset.n_rows,
hiddenLayerSize, randInit);
- BiasLayer<RMSPROP, RandomInitialization> inputBiasLayer(hiddenLayerSize,
+ BiasLayer<AdaDelta, RandomInitialization> inputBiasLayer(hiddenLayerSize,
1, randInit);
BaseLayer<LogisticFunction> inputBaseLayer;
- LinearLayer<RMSPROP, RandomInitialization> hiddenLayer1(hiddenLayerSize,
+ LinearLayer<AdaDelta, RandomInitialization> hiddenLayer1(hiddenLayerSize,
labels.n_rows, randInit);
- BiasLayer<RMSPROP, RandomInitialization> hiddenBiasLayer1(labels.n_rows,
+ BiasLayer<AdaDelta, RandomInitialization> hiddenBiasLayer1(labels.n_rows,
1, randInit);
BaseLayer<LogisticFunction> outputLayer;
@@ -87,7 +87,7 @@ BOOST_AUTO_TEST_CASE(SimpleRMSPropTestFunction)
// Check if the selected model isn't already optimized.
double classificationError = 1 - double(error) / dataset.n_cols;
- BOOST_REQUIRE_GE(classificationError, 0.05);
+ BOOST_REQUIRE_GE(classificationError, 0.09);
// Train the feed forward network.
Trainer<decltype(net)> trainer(net, maxEpochs, 1, 0.01);
@@ -107,7 +107,7 @@ BOOST_AUTO_TEST_CASE(SimpleRMSPropTestFunction)
classificationError = 1 - double(error) / dataset.n_cols;
- BOOST_REQUIRE_LE(classificationError, 0.05);
+ BOOST_REQUIRE_LE(classificationError, 0.09);
}
BOOST_AUTO_TEST_SUITE_END();
More information about the mlpack-git
mailing list