[mlpack-git] master: Rename OptimizeCode() to Encode(). (3b926fd)

gitdub at big.cc.gt.atl.ga.us gitdub at big.cc.gt.atl.ga.us
Fri Dec 11 12:47:10 EST 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/dd7c8b93fe5f299cb534cda70c1c786456f9a78f...3b926fd86ab143eb8af7327b9fb89fead7538df0

>---------------------------------------------------------------

commit 3b926fd86ab143eb8af7327b9fb89fead7538df0
Author: Ryan Curtin <ryan at ratml.org>
Date:   Fri Dec 11 17:46:32 2015 +0000

    Rename OptimizeCode() to Encode().


>---------------------------------------------------------------

3b926fd86ab143eb8af7327b9fb89fead7538df0
 src/mlpack/methods/sparse_coding/sparse_coding.cpp      |  2 +-
 src/mlpack/methods/sparse_coding/sparse_coding.hpp      | 13 ++++++++++---
 src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp |  4 ++--
 src/mlpack/methods/sparse_coding/sparse_coding_main.cpp |  2 +-
 src/mlpack/tests/sparse_coding_test.cpp                 | 14 +++++++-------
 5 files changed, 21 insertions(+), 14 deletions(-)

diff --git a/src/mlpack/methods/sparse_coding/sparse_coding.cpp b/src/mlpack/methods/sparse_coding/sparse_coding.cpp
index 9349b7f..be2cd6a 100644
--- a/src/mlpack/methods/sparse_coding/sparse_coding.cpp
+++ b/src/mlpack/methods/sparse_coding/sparse_coding.cpp
@@ -27,7 +27,7 @@ SparseCoding::SparseCoding(
   // Nothing to do.
 }
 
-void SparseCoding::OptimizeCode(const arma::mat& data, arma::mat& codes)
+void SparseCoding::Encode(const arma::mat& data, arma::mat& codes)
 {
   // When using the Cholesky version of LARS, this is correct even if
   // lambda2 > 0.
diff --git a/src/mlpack/methods/sparse_coding/sparse_coding.hpp b/src/mlpack/methods/sparse_coding/sparse_coding.hpp
index b7b1a1d..8d2b0b5 100644
--- a/src/mlpack/methods/sparse_coding/sparse_coding.hpp
+++ b/src/mlpack/methods/sparse_coding/sparse_coding.hpp
@@ -92,7 +92,7 @@ namespace sparse_coding {
  * algorithm from Honglak Lee's paper, but instead the LARS algorithm suggested
  * in that paper.
  *
- * Before the method is run, the dictionary is initialized using the
+ * When Train() is called, the dictionary is initialized using the
  * DictionaryInitializationPolicy class.  Possible choices include the
  * RandomInitializer, which provides an entirely random dictionary, the
  * DataDependentRandomInitializer, which provides a random dictionary based
@@ -100,6 +100,9 @@ namespace sparse_coding {
  * does not initialize the dictionary -- instead, the user should set the
  * dictionary using the Dictionary() mutator method.
  *
+ * Once a dictionary is trained with Train(), another matrix may be encoded with
+ * the Encode() function.
+ *
  * @tparam DictionaryInitializationPolicy The class to use to initialize the
  *     dictionary; must have 'void Initialize(const arma::mat& data, arma::mat&
  *     dictionary)' function.
@@ -177,9 +180,13 @@ class SparseCoding
                  DictionaryInitializer());
 
   /**
-   * Sparse code each point via LARS.
+   * Sparse code each point in the given dataset via LARS, using the current
+   * dictionary, and store the encoded data in the codes matrix.
+   *
+   * @param data Input data matrix to be encoded.
+   * @param codes Output codes matrix.
    */
-  void OptimizeCode(const arma::mat& data, arma::mat& codes);
+  void Encode(const arma::mat& data, arma::mat& codes);
 
   /**
    * Learn dictionary via Newton method based on Lagrange dual.
diff --git a/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp b/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp
index 57ccf32..1f09ec3 100644
--- a/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp
+++ b/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp
@@ -52,7 +52,7 @@ void SparseCoding::Train(
   Log::Info << "Initial coding step." << std::endl;
 
   arma::mat codes(atoms, data.n_cols);
-  OptimizeCode(data, codes);
+  Encode(data, codes);
   arma::uvec adjacencies = find(codes);
 
   Log::Info << "  Sparsity level: " << 100.0 * ((double) (adjacencies.n_elem))
@@ -77,7 +77,7 @@ void SparseCoding::Train(
 
     // Second step: perform the coding.
     Log::Info << "Performing coding step..." << std::endl;
-    OptimizeCode(data, codes);
+    Encode(data, codes);
     // Get the indices of all the nonzero elements in the codes.
     adjacencies = find(codes);
     Log::Info << "  Sparsity level: " << 100.0 * ((double) (adjacencies.n_elem))
diff --git a/src/mlpack/methods/sparse_coding/sparse_coding_main.cpp b/src/mlpack/methods/sparse_coding/sparse_coding_main.cpp
index d631f3f..e5ffce4 100644
--- a/src/mlpack/methods/sparse_coding/sparse_coding_main.cpp
+++ b/src/mlpack/methods/sparse_coding/sparse_coding_main.cpp
@@ -228,7 +228,7 @@ int main(int argc, char* argv[])
     }
 
     mat codes;
-    sc.OptimizeCode(matY, codes);
+    sc.Encode(matY, codes);
 
     if (CLI::HasParam("codes_file"))
       data::Save(CLI::GetParam<string>("codes_file"), codes);
diff --git a/src/mlpack/tests/sparse_coding_test.cpp b/src/mlpack/tests/sparse_coding_test.cpp
index 716e434..5129430 100644
--- a/src/mlpack/tests/sparse_coding_test.cpp
+++ b/src/mlpack/tests/sparse_coding_test.cpp
@@ -62,7 +62,7 @@ BOOST_AUTO_TEST_CASE(SparseCodingTestCodingStepLasso)
   SparseCoding sc(nAtoms, lambda1);
   mat Z;
   DataDependentRandomInitializer::Initialize(X, 25, sc.Dictionary());
-  sc.OptimizeCode(X, Z);
+  sc.Encode(X, Z);
 
   mat D = sc.Dictionary();
 
@@ -90,7 +90,7 @@ BOOST_AUTO_TEST_CASE(SparseCodingTestCodingStepElasticNet)
   SparseCoding sc(nAtoms, lambda1, lambda2);
   mat Z;
   DataDependentRandomInitializer::Initialize(X, 25, sc.Dictionary());
-  sc.OptimizeCode(X, Z);
+  sc.Encode(X, Z);
 
   mat D = sc.Dictionary();
 
@@ -122,7 +122,7 @@ BOOST_AUTO_TEST_CASE(SparseCodingTestDictionaryStep)
   SparseCoding sc(nAtoms, lambda1);
   mat Z;
   DataDependentRandomInitializer::Initialize(X, 25, sc.Dictionary());
-  sc.OptimizeCode(X, Z);
+  sc.Encode(X, Z);
 
   mat D = sc.Dictionary();
 
@@ -142,7 +142,7 @@ BOOST_AUTO_TEST_CASE(SerializationTest)
 
   mat Y = randu<mat>(100, 200);
   mat codes;
-  sc.OptimizeCode(Y, codes);
+  sc.Encode(Y, codes);
 
   SparseCoding scXml(50, 0.01), scText(nAtoms, 0.05), scBinary(0, 0.0);
   SerializeObjectAll(sc, scXml, scText, scBinary);
@@ -151,9 +151,9 @@ BOOST_AUTO_TEST_CASE(SerializationTest)
       scBinary.Dictionary());
 
   mat xmlCodes, textCodes, binaryCodes;
-  scXml.OptimizeCode(Y, xmlCodes);
-  scText.OptimizeCode(Y, textCodes);
-  scBinary.OptimizeCode(Y, binaryCodes);
+  scXml.Encode(Y, xmlCodes);
+  scText.Encode(Y, textCodes);
+  scBinary.Encode(Y, binaryCodes);
 
   CheckMatrices(codes, xmlCodes, textCodes, binaryCodes);
 



More information about the mlpack-git mailing list