[mlpack-git] master: remove move constructor and assignment; let the compiler generate them. Remember to define ARMA_USE_CXX11 (32d4bad)

gitdub at mlpack.org
Sat Feb 27 03:36:57 EST 2016


Repository : https://github.com/mlpack/mlpack
On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/c25a2b65e14e86e8c5d1a0c672893c33e337bc0f...a2e57d617e952f1ea2fda8a23e1c6bd2f78beb6d

>---------------------------------------------------------------

commit 32d4badb3adc3b3174ee12618e4db28d7f1bcddd
Author: stereomatchingkiss <stereomatchingkiss at gmail.com>
Date:   Sat Feb 27 16:36:57 2016 +0800

    remove move constructor and assignment; let the compiler generate them. Remember to define ARMA_USE_CXX11


>---------------------------------------------------------------

32d4badb3adc3b3174ee12618e4db28d7f1bcddd
 src/mlpack/methods/ann/layer/conv_layer.hpp        | 20 ------------------
 src/mlpack/methods/ann/layer/dropout_layer.hpp     | 16 ---------------
 src/mlpack/methods/ann/layer/linear_layer.hpp      | 14 -------------
 src/mlpack/methods/ann/layer/lstm_layer.hpp        | 15 --------------
 .../ann/layer/multiclass_classification_layer.hpp  |  8 ++++++++
 src/mlpack/methods/ann/layer/one_hot_layer.hpp     |  8 ++++++++
 src/mlpack/methods/ann/layer/pooling_layer.hpp     | 24 ++++++++--------------
 src/mlpack/methods/ann/layer/recurrent_layer.hpp   | 14 -------------
 src/mlpack/methods/ann/layer/softmax_layer.hpp     | 14 -------------
 9 files changed, 24 insertions(+), 109 deletions(-)

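Background on the change: once ARMA_USE_CXX11 is defined, arma::mat (and
arma::cube) provide their own move constructor and move assignment, so the
compiler-generated move operations for these layer classes already perform a
correct member-wise move, making the hand-written versions removed below
redundant. A minimal sketch of the idea (the Layer class here is
hypothetical, not part of mlpack):

    #define ARMA_USE_CXX11  // enable Armadillo's C++11 features, incl. moves
    #include <armadillo>
    #include <cstddef>
    #include <utility>

    class Layer
    {
     public:
      Layer(const size_t inSize, const size_t outSize) :
          inSize(inSize), outSize(outSize)
      {
        weights.set_size(outSize, inSize);
      }

      // No user-declared copy/move operations or destructor, so the compiler
      // implicitly generates move operations that move each member in turn,
      // including the arma::mat.

     private:
      size_t inSize;
      size_t outSize;
      arma::mat weights;
    };

    int main()
    {
      Layer a(10, 5);
      Layer b(std::move(a));  // implicitly-generated move constructor
      b = Layer(3, 2);        // implicitly-generated move assignment
    }
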
diff --git a/src/mlpack/methods/ann/layer/conv_layer.hpp b/src/mlpack/methods/ann/layer/conv_layer.hpp
index 06f38a8..91c0903 100644
--- a/src/mlpack/methods/ann/layer/conv_layer.hpp
+++ b/src/mlpack/methods/ann/layer/conv_layer.hpp
@@ -70,26 +70,6 @@ class ConvLayer
     weights.set_size(wfilter, hfilter, inMaps * outMaps);
   }
   
-  ConvLayer(ConvLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  ConvLayer& operator=(ConvLayer &&layer) noexcept
-  {
-    wfilter = layer.wfilter;
-    hfilter = layer.hfilter;
-    inMaps = layer.inMaps;
-    outMaps = layer.outMaps;
-    xStride = layer.xStride;
-    yStride = layer.yStride;
-    wPad = layer.wPad;
-    hPad = layer.hPad;
-    weights.swap(layer.weights);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
diff --git a/src/mlpack/methods/ann/layer/dropout_layer.hpp b/src/mlpack/methods/ann/layer/dropout_layer.hpp
index 4fa46af..c9da721 100644
--- a/src/mlpack/methods/ann/layer/dropout_layer.hpp
+++ b/src/mlpack/methods/ann/layer/dropout_layer.hpp
@@ -66,22 +66,6 @@ class DropoutLayer
     // Nothing to do here.
   }  
 
-  DropoutLayer(DropoutLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  DropoutLayer& operator=(DropoutLayer &&layer) noexcept
-  {
-    mask.swap(layer.mask);
-    ratio = layer.ratio;
-    scale = layer.scale;
-    deterministic = layer.deterministic;
-    rescale = layer.rescale;
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of the dropout layer.
    *
diff --git a/src/mlpack/methods/ann/layer/linear_layer.hpp b/src/mlpack/methods/ann/layer/linear_layer.hpp
index f059bb3..1c3a1fa 100644
--- a/src/mlpack/methods/ann/layer/linear_layer.hpp
+++ b/src/mlpack/methods/ann/layer/linear_layer.hpp
@@ -43,20 +43,6 @@ class LinearLayer
     weights.set_size(outSize, inSize);
   }
 
-  LinearLayer(LinearLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  LinearLayer& operator=(LinearLayer &&layer) noexcept
-  {
-    inSize = layer.inSize;
-    outSize = layer.outSize;
-    weights.swap(layer.weights);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
diff --git a/src/mlpack/methods/ann/layer/lstm_layer.hpp b/src/mlpack/methods/ann/layer/lstm_layer.hpp
index 0ac04e7..ee57456 100644
--- a/src/mlpack/methods/ann/layer/lstm_layer.hpp
+++ b/src/mlpack/methods/ann/layer/lstm_layer.hpp
@@ -61,21 +61,6 @@ class LSTMLayer
     }
   }  
 
-  LSTMLayer(LSTMLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  LSTMLayer& operator=(LSTMLayer &&layer) noexcept
-  {
-    outSize = layer.outSize;
-    seqLen = layer.seqLen;
-
-    peepholeWeights.swap(layer.peepholeWeights);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
diff --git a/src/mlpack/methods/ann/layer/multiclass_classification_layer.hpp b/src/mlpack/methods/ann/layer/multiclass_classification_layer.hpp
index f74ac28..43f8754 100644
--- a/src/mlpack/methods/ann/layer/multiclass_classification_layer.hpp
+++ b/src/mlpack/methods/ann/layer/multiclass_classification_layer.hpp
@@ -61,6 +61,14 @@ class MulticlassClassificationLayer
   {
     output = inputActivations;
   }
+
+  /**
+   * Serialize the layer.
+   */
+  template<typename Archive>
+  void Serialize(Archive& /* ar */, const unsigned int /* version */)
+  {
+  }
 }; // class MulticlassClassificationLayer
 
 //! Layer traits for the multiclass classification layer.
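The empty Serialize() methods added in this commit follow mlpack's
serialization convention; these output layers carry no state, so there is
nothing to archive. For comparison, a layer that did carry state would
register each member with the archive, roughly like this (a sketch assuming
mlpack's data::CreateNVP serialization shim; the members shown are
illustrative):

    template<typename Archive>
    void Serialize(Archive& ar, const unsigned int /* version */)
    {
      // Name/value pairs let boost::serialization archives round-trip
      // each member under a stable name.
      ar & data::CreateNVP(inSize, "inSize");
      ar & data::CreateNVP(outSize, "outSize");
      ar & data::CreateNVP(weights, "weights");
    }
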
diff --git a/src/mlpack/methods/ann/layer/one_hot_layer.hpp b/src/mlpack/methods/ann/layer/one_hot_layer.hpp
index e820632..a4dc6f4 100644
--- a/src/mlpack/methods/ann/layer/one_hot_layer.hpp
+++ b/src/mlpack/methods/ann/layer/one_hot_layer.hpp
@@ -62,6 +62,14 @@ class OneHotLayer
     inputActivations.max(maxIndex);
     output(maxIndex) = 1;
   }
+
+  /**
+   * Serialize the layer.
+   */
+  template<typename Archive>
+  void Serialize(Archive& /* ar */, const unsigned int /* version */)
+  {
+  }
 }; // class OneHotLayer
 
 //! Layer traits for the one-hot class classification layer.
diff --git a/src/mlpack/methods/ann/layer/pooling_layer.hpp b/src/mlpack/methods/ann/layer/pooling_layer.hpp
index fdaefad..64add9b 100644
--- a/src/mlpack/methods/ann/layer/pooling_layer.hpp
+++ b/src/mlpack/methods/ann/layer/pooling_layer.hpp
@@ -45,22 +45,6 @@ class PoolingLayer
     // Nothing to do here.
   }  
 
-  PoolingLayer(PoolingLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  PoolingLayer& operator=(PoolingLayer &&layer) noexcept
-  {
-    kSize = layer.kSize;
-    delta.swap(layer.delta);
-    inputParameter.swap(layer.inputParameter);
-    outputParameter.swap(layer.outputParameter);
-    pooling = std::move(layer.pooling);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
@@ -163,6 +147,14 @@ class PoolingLayer
   //! Modify the delta.
   OutputDataType& Delta() { return delta; }
   
+  /**
+   * Serialize the layer.
+   */
+  template<typename Archive>
+  void Serialize(Archive& /* ar */, const unsigned int /* version */)
+  {
+  }
+
  private:
   /**
    * Apply pooling to the input and store the results.
diff --git a/src/mlpack/methods/ann/layer/recurrent_layer.hpp b/src/mlpack/methods/ann/layer/recurrent_layer.hpp
index 332a659..729179f 100644
--- a/src/mlpack/methods/ann/layer/recurrent_layer.hpp
+++ b/src/mlpack/methods/ann/layer/recurrent_layer.hpp
@@ -57,20 +57,6 @@ class RecurrentLayer
     weights.set_size(outSize, inSize);
   }  
 
-  RecurrentLayer(RecurrentLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  RecurrentLayer& operator=(RecurrentLayer &&layer) noexcept
-  {
-    inSize = layer.inSize;
-    outSize = layer.outSize;
-    weights.swap(layer.weights);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
diff --git a/src/mlpack/methods/ann/layer/softmax_layer.hpp b/src/mlpack/methods/ann/layer/softmax_layer.hpp
index 12e0146..4bfd27d 100644
--- a/src/mlpack/methods/ann/layer/softmax_layer.hpp
+++ b/src/mlpack/methods/ann/layer/softmax_layer.hpp
@@ -36,20 +36,6 @@ class SoftmaxLayer
     // Nothing to do here.
   }  
 
-  SoftmaxLayer(SoftmaxLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  SoftmaxLayer& operator=(SoftmaxLayer &&layer) noexcept
-  {
-    delta.swap(layer.delta);
-    inputParameter.swap(layer.inputParameter);
-    outputParameter.swap(layer.outputParameter);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
