[mlpack-git] master: Decrease the overall test time by decreasing the layer size and number of epochs. (5235578)
gitdub at big.cc.gt.atl.ga.us
Wed Sep 2 09:34:18 EDT 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/2da470c4d465af81639fc5bccbd8a5da84c75cdc...52355783c75dcf250b75362f5734fa357156da62
>---------------------------------------------------------------
commit 52355783c75dcf250b75362f5734fa357156da62
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date: Wed Sep 2 15:34:09 2015 +0200
Decrease the overall test time by decreasing the layer size and number of epochs.
>---------------------------------------------------------------
52355783c75dcf250b75362f5734fa357156da62
src/mlpack/methods/ann/trainer/trainer.hpp | 2 --
src/mlpack/tests/feedforward_network_test.cpp | 8 ++++----
2 files changed, 4 insertions(+), 6 deletions(-)
diff --git a/src/mlpack/methods/ann/trainer/trainer.hpp b/src/mlpack/methods/ann/trainer/trainer.hpp
index cbb6521..62b331e 100644
--- a/src/mlpack/methods/ann/trainer/trainer.hpp
+++ b/src/mlpack/methods/ann/trainer/trainer.hpp
@@ -84,8 +84,6 @@ class Trainer
ElementCount(trainingData) - 1, ElementCount(trainingData));
epoch = 0;
- size_t foo = 0;
-
while(true)
{
if (shuffle)
diff --git a/src/mlpack/tests/feedforward_network_test.cpp b/src/mlpack/tests/feedforward_network_test.cpp
index 2ffc3d4..29a562c 100644
--- a/src/mlpack/tests/feedforward_network_test.cpp
+++ b/src/mlpack/tests/feedforward_network_test.cpp
@@ -265,7 +265,7 @@ BOOST_AUTO_TEST_CASE(DropoutNetworkTest)
BuildDropoutNetwork<LogisticFunction,
BinaryClassificationLayer,
MeanSquaredErrorFunction>
- (trainData, trainLabels, testData, testLabels, 4, 500, 0.1, 60);
+ (trainData, trainLabels, testData, testLabels, 4, 100, 0.1, 60);
dataset.load("mnist_first250_training_4s_and_9s.arm");
@@ -280,13 +280,13 @@ BOOST_AUTO_TEST_CASE(DropoutNetworkTest)
BuildVanillaNetwork<LogisticFunction,
BinaryClassificationLayer,
MeanSquaredErrorFunction>
- (dataset, labels, dataset, labels, 30, 100, 0.6, 10);
+ (dataset, labels, dataset, labels, 8, 100, 0.6, 10);
// Vanilla neural net with tanh activation function.
BuildVanillaNetwork<TanhFunction,
BinaryClassificationLayer,
MeanSquaredErrorFunction>
- (dataset, labels, dataset, labels, 10, 200, 0.6, 20);
+ (dataset, labels, dataset, labels, 8, 100, 0.6, 20);
}
/**
@@ -424,7 +424,7 @@ BOOST_AUTO_TEST_CASE(NetworkDecreasingErrorTest)
LogisticFunction,
BinaryClassificationLayer,
MeanSquaredErrorFunction>
- (dataset, labels, dataset, labels, 30, 50);
+ (dataset, labels, dataset, labels, 30, 10);
}
BOOST_AUTO_TEST_SUITE_END();
More information about the mlpack-git mailing list