[mlpack-git] master: Serialize all necessary parameters and minor formatting fixes. (bb8507c)

gitdub at mlpack.org
Wed Mar 2 12:27:54 EST 2016


Repository : https://github.com/mlpack/mlpack
On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/1bedf15126f6bd0bc93a3233914ac95486a3c0da...bb8507c561d5e135ba2a6a168ad9d0d5735fc85b

>---------------------------------------------------------------

commit bb8507c561d5e135ba2a6a168ad9d0d5735fc85b
Author: marcus <marcus.edel at fu-berlin.de>
Date:   Wed Mar 2 18:27:54 2016 +0100

    Serialize all necessary parameters and minor formatting fixes.


>---------------------------------------------------------------

bb8507c561d5e135ba2a6a168ad9d0d5735fc85b
 src/mlpack/methods/ann/ffn_impl.hpp                        |  0
 src/mlpack/methods/ann/layer/base_layer.hpp                |  5 +++--
 src/mlpack/methods/ann/layer/bias_layer.hpp                |  3 ++-
 .../methods/ann/layer/binary_classification_layer.hpp      |  5 +++--
 src/mlpack/methods/ann/layer/conv_layer.hpp                | 10 +++++++++-
 src/mlpack/methods/ann/layer/dropout_layer.hpp             |  4 +++-
 src/mlpack/methods/ann/layer/linear_layer.hpp              |  0
 src/mlpack/methods/ann/layer/lstm_layer.hpp                | 14 +++++++++++++-
 src/mlpack/methods/ann/layer/one_hot_layer.hpp             |  5 +++--
 src/mlpack/methods/ann/layer/pooling_layer.hpp             |  4 +++-
 src/mlpack/methods/ann/layer/recurrent_layer.hpp           |  2 +-
 src/mlpack/methods/ann/layer/softmax_layer.hpp             |  5 +++--
 src/mlpack/methods/ann/layer/sparse_bias_layer.hpp         |  3 ++-
 src/mlpack/methods/ann/layer/sparse_input_layer.hpp        |  3 ++-
 src/mlpack/methods/ann/layer/sparse_output_layer.hpp       |  5 ++++-
 src/mlpack/methods/ann/rnn_impl.hpp                        |  0
 16 files changed, 51 insertions(+), 17 deletions(-)

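For context on the hunks that follow: every ANN layer exposes a single templated Serialize() method, and each parameter is wrapped in a name-value pair via data::CreateNVP(), so the same function handles both writing and reading through Boost.Serialization. Below is a minimal sketch of that pattern, assuming the data::Save()/data::Load() model API from this era of the codebase; ExampleLayer and its members are hypothetical and only illustrate the mechanism, not any layer touched by this commit.

    #include <mlpack/core.hpp>

    class ExampleLayer
    {
     public:
      //! Serialize (or de-serialize) the layer parameters.
      template<typename Archive>
      void Serialize(Archive& ar, const unsigned int /* version */)
      {
        // CreateNVP names each member so it round-trips through the archive.
        ar & mlpack::data::CreateNVP(weights, "weights");
        ar & mlpack::data::CreateNVP(ratio, "ratio");
      }

     private:
      arma::mat weights;
      double ratio = 0.5;
    };

    // Hypothetical round trip; the file and object names are arbitrary.
    // ExampleLayer layer;
    // mlpack::data::Save("layer.xml", "layer", layer);
    // mlpack::data::Load("layer.xml", "layer", layer);
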
diff --git a/src/mlpack/methods/ann/layer/base_layer.hpp b/src/mlpack/methods/ann/layer/base_layer.hpp
index 1cbeaaf..876d87e 100644
--- a/src/mlpack/methods/ann/layer/base_layer.hpp
+++ b/src/mlpack/methods/ann/layer/base_layer.hpp
@@ -135,11 +135,12 @@ class BaseLayer
   OutputDataType& Delta() { return delta; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
-  void Serialize(Archive& ar, const unsigned int /* version */)
+  void Serialize(Archive& /* ar */, const unsigned int /* version */)
   {
+    /* Nothing to do here */
   }
 
  private:
diff --git a/src/mlpack/methods/ann/layer/bias_layer.hpp b/src/mlpack/methods/ann/layer/bias_layer.hpp
index 007fa54..10b8b90 100644
--- a/src/mlpack/methods/ann/layer/bias_layer.hpp
+++ b/src/mlpack/methods/ann/layer/bias_layer.hpp
@@ -148,12 +148,13 @@ class BiasLayer
   InputDataType& Gradient() { return gradient; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
   {
     ar & data::CreateNVP(weights, "weights");
+    ar & data::CreateNVP(bias, "bias");
   }
 
  private:
diff --git a/src/mlpack/methods/ann/layer/binary_classification_layer.hpp b/src/mlpack/methods/ann/layer/binary_classification_layer.hpp
index 9761458..368c1ec 100644
--- a/src/mlpack/methods/ann/layer/binary_classification_layer.hpp
+++ b/src/mlpack/methods/ann/layer/binary_classification_layer.hpp
@@ -62,11 +62,12 @@ class BinaryClassificationLayer
   }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
-  void Serialize(Archive& ar, const unsigned int /* version */)
+  void Serialize(Archive& /* ar */, const unsigned int /* version */)
   {
+    /* Nothing to do here */
   }
 }; // class BinaryClassificationLayer
 
diff --git a/src/mlpack/methods/ann/layer/conv_layer.hpp b/src/mlpack/methods/ann/layer/conv_layer.hpp
index 21e3f6f..aeb6c07 100644
--- a/src/mlpack/methods/ann/layer/conv_layer.hpp
+++ b/src/mlpack/methods/ann/layer/conv_layer.hpp
@@ -185,12 +185,20 @@ class ConvLayer
   OutputDataType& Gradient() { return gradient; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
   {
     ar & data::CreateNVP(weights, "weights");
+    ar & data::CreateNVP(wfilter, "wfilter");
+    ar & data::CreateNVP(hfilter, "hfilter");
+    ar & data::CreateNVP(inMaps, "inMaps");
+    ar & data::CreateNVP(outMaps, "outMaps");
+    ar & data::CreateNVP(xStride, "xStride");
+    ar & data::CreateNVP(yStride, "yStride");
+    ar & data::CreateNVP(wPad, "wPad");
+    ar & data::CreateNVP(hPad, "hPad");
   }
 
  private:
diff --git a/src/mlpack/methods/ann/layer/dropout_layer.hpp b/src/mlpack/methods/ann/layer/dropout_layer.hpp
index b191583..05d38c6 100644
--- a/src/mlpack/methods/ann/layer/dropout_layer.hpp
+++ b/src/mlpack/methods/ann/layer/dropout_layer.hpp
@@ -182,11 +182,13 @@ class DropoutLayer
   bool& Rescale() {return rescale; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
   {
+    ar & data::CreateNVP(ratio, "ratio");
+    ar & data::CreateNVP(rescale, "rescale");
   }
 
  private:
diff --git a/src/mlpack/methods/ann/layer/lstm_layer.hpp b/src/mlpack/methods/ann/layer/lstm_layer.hpp
index d424676..59c103c 100644
--- a/src/mlpack/methods/ann/layer/lstm_layer.hpp
+++ b/src/mlpack/methods/ann/layer/lstm_layer.hpp
@@ -278,11 +278,23 @@ class LSTMLayer
   size_t& SeqLen() { return seqLen; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
   {
+    ar & data::CreateNVP(peepholes, "peepholes");
+
+    if (peepholes)
+    {
+      ar & data::CreateNVP(peepholeWeights, "peepholeWeights");
+
+      if (Archive::is_loading::value)
+      {
+        peepholeDerivatives = arma::zeros<OutputDataType>(
+            peepholeWeights.n_rows, 3);
+      }
+    }
   }
 
  private:
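
The lstm_layer.hpp hunk above also shows the load-time re-initialization idiom: Archive::is_loading::value is a compile-time flag on Boost.Serialization archives, so the branch runs only while reading, and state that is cheap to rebuild (here the peephole derivatives) is re-zeroed to a matching shape instead of being stored. A minimal sketch of the idiom with hypothetical member names; only the is_loading branch mirrors the actual code:

    #include <mlpack/core.hpp>

    class ExampleRecurrentLayer
    {
     public:
      template<typename Archive>
      void Serialize(Archive& ar, const unsigned int /* version */)
      {
        ar & mlpack::data::CreateNVP(weights, "weights");

        // This branch only executes when the archive is reading, so derived
        // state that was never written can be reconstructed here.
        if (Archive::is_loading::value)
          derivatives = arma::zeros<arma::mat>(weights.n_rows, 3);
      }

     private:
      arma::mat weights;
      arma::mat derivatives;  // Recomputed on load rather than serialized.
    };
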
diff --git a/src/mlpack/methods/ann/layer/one_hot_layer.hpp b/src/mlpack/methods/ann/layer/one_hot_layer.hpp
index a4dc6f4..34458c3 100644
--- a/src/mlpack/methods/ann/layer/one_hot_layer.hpp
+++ b/src/mlpack/methods/ann/layer/one_hot_layer.hpp
@@ -64,11 +64,12 @@ class OneHotLayer
   }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
-  void Serialize(Archive& ar, const unsigned int /* version */)
+  void Serialize(Archive& /* ar */, const unsigned int /* version */)
   {
+    /* Nothing to do here */
   }
 }; // class OneHotLayer
 
diff --git a/src/mlpack/methods/ann/layer/pooling_layer.hpp b/src/mlpack/methods/ann/layer/pooling_layer.hpp
index 78e929f..e19ddce 100644
--- a/src/mlpack/methods/ann/layer/pooling_layer.hpp
+++ b/src/mlpack/methods/ann/layer/pooling_layer.hpp
@@ -148,11 +148,13 @@ class PoolingLayer
   OutputDataType& Delta() { return delta; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
   {
+    ar & data::CreateNVP(kSize, "kSize");
+    ar & data::CreateNVP(pooling, "pooling");
   }
 
  private:
diff --git a/src/mlpack/methods/ann/layer/recurrent_layer.hpp b/src/mlpack/methods/ann/layer/recurrent_layer.hpp
index 55f3acd..639852e 100644
--- a/src/mlpack/methods/ann/layer/recurrent_layer.hpp
+++ b/src/mlpack/methods/ann/layer/recurrent_layer.hpp
@@ -130,7 +130,7 @@ class RecurrentLayer
   OutputDataType& Gradient() { return gradient; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
diff --git a/src/mlpack/methods/ann/layer/softmax_layer.hpp b/src/mlpack/methods/ann/layer/softmax_layer.hpp
index 0875084..151ebfe 100644
--- a/src/mlpack/methods/ann/layer/softmax_layer.hpp
+++ b/src/mlpack/methods/ann/layer/softmax_layer.hpp
@@ -84,11 +84,12 @@ class SoftmaxLayer
   InputDataType& Delta() { return delta; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
-  void Serialize(Archive& ar, const unsigned int /* version */)
+  void Serialize(Archive& /* ar */, const unsigned int /* version */)
   {
+    /* Nothing to do here */
   }
 
  private:
diff --git a/src/mlpack/methods/ann/layer/sparse_bias_layer.hpp b/src/mlpack/methods/ann/layer/sparse_bias_layer.hpp
index c166dda..9b79536 100644
--- a/src/mlpack/methods/ann/layer/sparse_bias_layer.hpp
+++ b/src/mlpack/methods/ann/layer/sparse_bias_layer.hpp
@@ -118,12 +118,13 @@ class SparseBiasLayer
   InputDataType& Gradient() { return gradient; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
   {
     ar & data::CreateNVP(weights, "weights");
+    ar & data::CreateNVP(batchSize, "batchSize");
   }
 
  private:
diff --git a/src/mlpack/methods/ann/layer/sparse_input_layer.hpp b/src/mlpack/methods/ann/layer/sparse_input_layer.hpp
index d5384da..ce5ce7d 100644
--- a/src/mlpack/methods/ann/layer/sparse_input_layer.hpp
+++ b/src/mlpack/methods/ann/layer/sparse_input_layer.hpp
@@ -119,12 +119,13 @@ class SparseInputLayer
   OutputDataType& Gradient() { return gradient; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
   {
     ar & data::CreateNVP(weights, "weights");
+    ar & data::CreateNVP(lambda, "lambda");
   }
 
  private:
diff --git a/src/mlpack/methods/ann/layer/sparse_output_layer.hpp b/src/mlpack/methods/ann/layer/sparse_output_layer.hpp
index 3e647d6..3022e2a 100644
--- a/src/mlpack/methods/ann/layer/sparse_output_layer.hpp
+++ b/src/mlpack/methods/ann/layer/sparse_output_layer.hpp
@@ -157,12 +157,15 @@ class SparseOutputLayer
   OutputDataType& Gradient() { return gradient; }
   
   /**
-   * Serialize the layer
+   * Serialize the layer.
    */
   template<typename Archive>
   void Serialize(Archive& ar, const unsigned int /* version */)
   {
     ar & data::CreateNVP(weights, "weights");
+    ar & data::CreateNVP(lambda, "lambda");
+    ar & data::CreateNVP(beta, "beta");
+    ar & data::CreateNVP(rho, "rho");
   }
 
  private:



