[mlpack-git] master: Use the simplified performance function. (fa9937b)
gitdub at big.cc.gt.atl.ga.us
Mon Jun 1 17:28:33 EDT 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/0547fb75a32eda7e273651a7e6b6a258c5885a1e...61d7876048f2208cf45d41d71f9d4baa825e2a51
>---------------------------------------------------------------
commit fa9937b6324c14609718fbe1c865915f0cb9ed5a
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date: Mon Jun 1 23:14:41 2015 +0200
Use the simplified performance function.
>---------------------------------------------------------------
fa9937b6324c14609718fbe1c865915f0cb9ed5a
src/mlpack/tests/feedforward_network_test.cpp | 22 +++++++++++-----------
src/mlpack/tests/recurrent_network_test.cpp | 20 ++++++++++----------
2 files changed, 21 insertions(+), 21 deletions(-)
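The diff below touches only test call sites: the performance function is no
longer a class template, so every MeanSquaredErrorFunction<> > template
argument becomes a plain MeanSquaredErrorFunction. A minimal sketch of what
such a simplified performance function can look like (illustrative only, not
the actual mlpack header; the matrix type moves from the class template to
the static Error() member):

    #include <armadillo>

    // Sketch: the class itself is no longer a template, so call sites can
    // name it without angle brackets. Only the static Error() member is
    // templated, deducing the matrix type from its arguments.
    class MeanSquaredErrorFunction
    {
     public:
      // Mean of the squared element-wise differences between the network
      // output and the target values.
      template<typename DataType>
      static double Error(const DataType& input, const DataType& target)
      {
        return arma::accu(arma::square(target - input)) / input.n_elem;
      }
    };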
diff --git a/src/mlpack/tests/feedforward_network_test.cpp b/src/mlpack/tests/feedforward_network_test.cpp
index a4ce5e2..1fd8265 100644
--- a/src/mlpack/tests/feedforward_network_test.cpp
+++ b/src/mlpack/tests/feedforward_network_test.cpp
@@ -162,15 +162,15 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkTest)
RandomInitialization randInitA(1, 2);
// Vanilla neural net with logistic activation function.
- // Because 92 percent of the patients are not hyperthyroid a the neural
- // network mst be significant better than 92%.
+ // Because 92 percent of the patients are not hyperthyroid, the neural
+ // network must be significantly better than 92%.
BuildVanillaNetwork<RandomInitialization,
LogisticFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(trainData, trainLabels, testData, testLabels, 4, 500,
- 0.3, 60, randInitA);
+ 0.1, 60, randInitA);
dataset.load("mnist_first250_training_4s_and_9s.arm");
@@ -188,7 +188,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkTest)
LogisticFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(dataset, labels, dataset, labels, 100, 100, 0.6, 10, randInitB);
// Vanilla neural net with tanh activation function.
@@ -196,7 +196,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkTest)
TanhFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(dataset, labels, dataset, labels, 10, 200, 0.6, 20, randInitB);
}
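The first hunk above also lowers the SteepestDescent learning rate for the
thyroid test from 0.3 to 0.1. For reference, a steepest-descent update is
just a step against the gradient, scaled by that rate; a minimal sketch
(illustrative, not the mlpack optimizer):

    #include <armadillo>

    // One steepest-descent step: move the weights against the gradient,
    // scaled by the learning rate (e.g. 0.1, as in the hunk above).
    void SteepestDescentStep(arma::mat& weights,
                             const arma::mat& gradient,
                             const double learningRate)
    {
      weights -= learningRate * gradient;
    }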
@@ -220,7 +220,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
LogisticFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 4, 0, 0, 0.01, randInit);
// Vanilla neural net with tanh activation function.
@@ -228,7 +228,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
TanhFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 4, 0, 0, 0.01, randInit);
// Test on a linearly separable dataset (AND).
@@ -241,7 +241,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
LogisticFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 4, 0, 0, 0.01, randInit);
// Vanilla neural net with tanh activation function.
@@ -249,7 +249,7 @@ BOOST_AUTO_TEST_CASE(VanillaNetworkConvergenceTest)
TanhFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 4, 0, 0, 0.01, randInit);
}
@@ -372,7 +372,7 @@ BOOST_AUTO_TEST_CASE(NetworkDecreasingErrorTest)
LogisticFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(dataset, labels, dataset, labels, 100, 50, randInitB);
}
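With the simplified class, evaluating an error needs no template arguments
on the class itself; for example (hypothetical data, assuming the
MeanSquaredErrorFunction sketch above):

    #include <armadillo>
    #include <iostream>

    int main()
    {
      // Hypothetical network output and matching targets.
      arma::mat prediction = { { 0.9, 0.1 }, { 0.2, 0.8 } };
      arma::mat target     = { { 1.0, 0.0 }, { 0.0, 1.0 } };

      // No angle brackets on the class; Error() deduces arma::mat itself.
      const double mse = MeanSquaredErrorFunction::Error(prediction, target);
      std::cout << "MSE: " << mse << std::endl;

      return 0;
    }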
diff --git a/src/mlpack/tests/recurrent_network_test.cpp b/src/mlpack/tests/recurrent_network_test.cpp
index e29face..566a821 100644
--- a/src/mlpack/tests/recurrent_network_test.cpp
+++ b/src/mlpack/tests/recurrent_network_test.cpp
@@ -16,7 +16,7 @@
#include <mlpack/methods/ann/init_rules/nguyen_widrow_init.hpp>
#include <mlpack/methods/ann/layer/neuron_layer.hpp>
- #include <mlpack/methods/ann/layer/lstm_layer.hpp>
+#include <mlpack/methods/ann/layer/lstm_layer.hpp>
#include <mlpack/methods/ann/layer/bias_layer.hpp>
#include <mlpack/methods/ann/layer/binary_classification_layer.hpp>
#include <mlpack/methods/ann/layer/multiclass_classification_layer.hpp>
@@ -119,7 +119,7 @@ BOOST_AUTO_TEST_CASE(SequenceClassificationTest)
SteepestDescent< > conOptimizer3(hiddenLayer0.InputSize(),
hiddenLayer1.OutputSize(), 1, 0);
- NguyenWidrowInitialization randInit;
+ RandomInitialization randInit(-0.5, 0.5);
FullConnection<
decltype(inputLayer),
@@ -148,7 +148,7 @@ BOOST_AUTO_TEST_CASE(SequenceClassificationTest)
RNN<decltype(modules),
decltype(outputLayer),
- MeanSquaredErrorFunction<> > net(modules, outputLayer);
+ MeanSquaredErrorFunction> net(modules, outputLayer);
// Train the network for 1000 epochs.
Trainer<decltype(net)> trainer(net, 1000);
@@ -328,7 +328,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
LogisticFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 10, 10, randInit);
// Vanilla neural net with identity activation function.
@@ -336,7 +336,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
IdentityFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 1, 1, randInit);
// Vanilla neural net with rectifier activation function.
@@ -344,7 +344,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
RectifierFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 10, 10, randInit);
// Vanilla neural net with softsign activation function.
@@ -352,7 +352,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
SoftsignFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 10, 10, randInit);
// Vanilla neural net with tanh activation function.
@@ -360,7 +360,7 @@ BOOST_AUTO_TEST_CASE(FeedForwardRecurrentNetworkTest)
TanhFunction,
SteepestDescent<>,
BinaryClassificationLayer<>,
- MeanSquaredErrorFunction<> >
+ MeanSquaredErrorFunction>
(input, labels, input, labels, 10, 10, randInit);
}
@@ -626,7 +626,7 @@ void ReberGrammarTestNetwork(HiddenLayerType& hiddenLayer0,
RNN<decltype(modules),
decltype(outputLayer),
- MeanSquaredErrorFunction<> > net(modules, outputLayer);
+ MeanSquaredErrorFunction> net(modules, outputLayer);
// Train the network for (500 * trainReberGrammarCount) epochs.
Trainer<decltype(net)> trainer(net, 1, 1, 0, false);
@@ -861,7 +861,7 @@ void DistractedSequenceRecallTestNetwork(HiddenLayerType& hiddenLayer0)
RNN<decltype(modules),
decltype(outputLayer),
- MeanSquaredErrorFunction<> > net(modules, outputLayer);
+ MeanSquaredErrorFunction> net(modules, outputLayer);
// Train the network for (500 * trainDistractedSequenceCount) epochs.
Trainer<decltype(net)> trainer(net, 1, 1, 0, false);
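The recurrent test also swaps NguyenWidrowInitialization for
RandomInitialization(-0.5, 0.5), i.e. weights drawn uniformly from
[-0.5, 0.5]. A minimal sketch of that kind of initializer (illustrative
only; the real rules live under src/mlpack/methods/ann/init_rules/):

    #include <armadillo>

    // Sketch of a uniform random weight initializer: fill a rows x cols
    // weight matrix with values drawn uniformly from [lowerBound, upperBound].
    class UniformRandomInitSketch
    {
     public:
      UniformRandomInitSketch(const double lowerBound, const double upperBound)
          : lowerBound(lowerBound), upperBound(upperBound) { }

      void Initialize(arma::mat& W,
                      const arma::uword rows,
                      const arma::uword cols) const
      {
        W = lowerBound + arma::randu<arma::mat>(rows, cols)
            * (upperBound - lowerBound);
      }

     private:
      const double lowerBound;
      const double upperBound;
    };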