[mlpack-git] master: Update tests to use Train(). (1a0a4ce)
gitdub at big.cc.gt.atl.ga.us
gitdub at big.cc.gt.atl.ga.us
Fri Dec 18 11:43:08 EST 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/5ba11bc90223b55eecd5da4cfbe86c8fc40637a5...df229e45a5bd7842fe019e9d49ed32f13beb6aaa
>---------------------------------------------------------------
commit 1a0a4ce392c64875df9bcb2d1025dc0cecb637d3
Author: Ryan Curtin <ryan at ratml.org>
Date: Wed Dec 16 21:02:20 2015 +0000
Update tests to use Train().
>---------------------------------------------------------------
1a0a4ce392c64875df9bcb2d1025dc0cecb637d3
src/mlpack/tests/distribution_test.cpp | 12 ++++++------
src/mlpack/tests/gmm_test.cpp | 26 +++++++++++++-------------
2 files changed, 19 insertions(+), 19 deletions(-)
diff --git a/src/mlpack/tests/distribution_test.cpp b/src/mlpack/tests/distribution_test.cpp
index f4f6b50..0ca0dfd 100644
--- a/src/mlpack/tests/distribution_test.cpp
+++ b/src/mlpack/tests/distribution_test.cpp
@@ -73,13 +73,13 @@ BOOST_AUTO_TEST_CASE(DiscreteDistributionRandomTest)
/**
* Make sure we can estimate from observations correctly.
*/
-BOOST_AUTO_TEST_CASE(DiscreteDistributionEstimateTest)
+BOOST_AUTO_TEST_CASE(DiscreteDistributionTrainTest)
{
DiscreteDistribution d(4);
arma::mat obs("0 0 1 1 2 2 2 3");
- d.Estimate(obs);
+ d.Train(obs);
BOOST_REQUIRE_CLOSE(d.Probability("0"), 0.25, 1e-5);
BOOST_REQUIRE_CLOSE(d.Probability("1"), 0.25, 1e-5);
@@ -90,7 +90,7 @@ BOOST_AUTO_TEST_CASE(DiscreteDistributionEstimateTest)
/**
* Estimate from observations with probabilities.
*/
-BOOST_AUTO_TEST_CASE(DiscreteDistributionEstimateProbTest)
+BOOST_AUTO_TEST_CASE(DiscreteDistributionTrainProbTest)
{
DiscreteDistribution d(3);
@@ -98,7 +98,7 @@ BOOST_AUTO_TEST_CASE(DiscreteDistributionEstimateProbTest)
arma::vec prob("0.25 0.25 0.5 1.0");
- d.Estimate(obs, prob);
+ d.Train(obs, prob);
BOOST_REQUIRE_CLOSE(d.Probability("0"), 0.25, 1e-5);
BOOST_REQUIRE_CLOSE(d.Probability("1"), 0.25, 1e-5);
@@ -340,7 +340,7 @@ BOOST_AUTO_TEST_CASE(GaussianDistributionRandomTest)
/**
* Make sure that we can properly estimate from given observations.
*/
-BOOST_AUTO_TEST_CASE(GaussianDistributionEstimateTest)
+BOOST_AUTO_TEST_CASE(GaussianDistributionTrainTest)
{
arma::vec mean("1.0 3.0 0.0 2.5");
arma::mat cov("3.0 0.0 1.0 4.0;"
@@ -362,7 +362,7 @@ BOOST_AUTO_TEST_CASE(GaussianDistributionEstimateTest)
arma::vec actualMean = arma::mean(observations, 1);
arma::mat actualCov = ccov(observations);
- d.Estimate(observations);
+ d.Train(observations);
// Check that everything is estimated right.
for (size_t i = 0; i < 4; i++)
diff --git a/src/mlpack/tests/gmm_test.cpp b/src/mlpack/tests/gmm_test.cpp
index 41b24ab..603313c 100644
--- a/src/mlpack/tests/gmm_test.cpp
+++ b/src/mlpack/tests/gmm_test.cpp
@@ -100,7 +100,7 @@ BOOST_AUTO_TEST_CASE(GMMTrainEMOneGaussian)
// Now, train the model.
GMM<> gmm(1, 2);
- gmm.Estimate(data, 10);
+ gmm.Train(data, 10);
arma::vec actualMean = arma::mean(data, 1);
arma::mat actualCovar = ccov(data, 1 /* biased estimator */);
@@ -194,7 +194,7 @@ BOOST_AUTO_TEST_CASE(GMMTrainEMMultipleGaussians)
// Now train the model.
GMM<> gmm(gaussians, dims);
- gmm.Estimate(data, 10);
+ gmm.Train(data, 10);
arma::uvec sortRef = sort_index(weights);
arma::uvec sortTry = sort_index(gmm.Weights());
@@ -220,7 +220,7 @@ BOOST_AUTO_TEST_CASE(GMMTrainEMMultipleGaussians)
}
/**
- * Train a single-gaussian mixture, but using the overload of Estimate() where
+ * Train a single-gaussian mixture, but using the overload of Train() where
* probabilities of the observation are given.
*/
BOOST_AUTO_TEST_CASE(GMMTrainEMSingleGaussianWithProbability)
@@ -237,7 +237,7 @@ BOOST_AUTO_TEST_CASE(GMMTrainEMSingleGaussianWithProbability)
// Now train the model.
GMM<> g(1, 2);
- g.Estimate(observations, probabilities, 10);
+ g.Train(observations, probabilities, 10);
// Check that it is trained correctly. 5% tolerance because of random error
// present in observations.
@@ -254,7 +254,7 @@ BOOST_AUTO_TEST_CASE(GMMTrainEMSingleGaussianWithProbability)
}
/**
- * Train a multi-Gaussian mixture, using the overload of Estimate() where
+ * Train a multi-Gaussian mixture, using the overload of Train() where
* probabilities of the observation are given.
*/
BOOST_AUTO_TEST_CASE(GMMTrainEMMultipleGaussiansWithProbability)
@@ -308,7 +308,7 @@ BOOST_AUTO_TEST_CASE(GMMTrainEMMultipleGaussiansWithProbability)
// Now train the model.
GMM<> g(4, 3); // 3 dimensions, 4 components.
- g.Estimate(points, probabilities, 8);
+ g.Train(points, probabilities, 8);
// Now check the results. We need to order by weights so that when we do the
// checking, things will be correct.
@@ -393,7 +393,7 @@ BOOST_AUTO_TEST_CASE(GMMRandomTest)
// A new one which we'll train.
GMM<> gmm2(2, 2);
- gmm2.Estimate(observations, 10);
+ gmm2.Train(observations, 10);
// Now check the results. We need to order by weights so that when we do the
// checking, things will be correct.
@@ -683,12 +683,12 @@ BOOST_AUTO_TEST_CASE(UseExistingModelTest)
// Now train the model.
GMM<> gmm(gaussians, dims);
- gmm.Estimate(data, 10);
+ gmm.Train(data, 10);
GMM<> oldgmm(gmm);
// Retrain the model with the existing model as the starting point.
- gmm.Estimate(data, 1, true);
+ gmm.Train(data, 1, true);
// Check for similarity.
for (size_t i = 0; i < gmm.Gaussians(); ++i)
@@ -710,7 +710,7 @@ BOOST_AUTO_TEST_CASE(UseExistingModelTest)
gmm = oldgmm;
// Retrain the model with the existing model as the starting point.
- gmm.Estimate(data, 10, true);
+ gmm.Train(data, 10, true);
// Check for similarity.
for (size_t i = 0; i < gmm.Gaussians(); ++i)
@@ -728,13 +728,13 @@ BOOST_AUTO_TEST_CASE(UseExistingModelTest)
}
}
- // Do it again, but using the overload of Estimate() that takes probabilities
+ // Do it again, but using the overload of Train() that takes probabilities
// into account.
arma::vec probabilities(data.n_cols);
probabilities.ones(); // Fill with ones.
gmm = oldgmm;
- gmm.Estimate(data, probabilities, 1, true);
+ gmm.Train(data, probabilities, 1, true);
// Check for similarity.
for (size_t i = 0; i < gmm.Gaussians(); ++i)
@@ -754,7 +754,7 @@ BOOST_AUTO_TEST_CASE(UseExistingModelTest)
// One more time, with multiple trials.
gmm = oldgmm;
- gmm.Estimate(data, probabilities, 10, true);
+ gmm.Train(data, probabilities, 10, true);
// Check for similarity.
for (size_t i = 0; i < gmm.Gaussians(); ++i)
More information about the mlpack-git mailing list