[mlpack-git] master: remove move constructor and assignment, let the compiler generate them (needs ARMA_USE_CXX11 to be defined) (99a6ada)

gitdub at mlpack.org gitdub at mlpack.org
Sat Feb 27 03:31:31 EST 2016


Repository : https://github.com/mlpack/mlpack
On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/c25a2b65e14e86e8c5d1a0c672893c33e337bc0f...a2e57d617e952f1ea2fda8a23e1c6bd2f78beb6d

>---------------------------------------------------------------

commit 99a6ada43f6be93ef85205a07bb14e3508fba050
Author: stereomatchingkiss <stereomatchingkiss at gmail.com>
Date:   Sat Feb 27 16:31:19 2016 +0800

    remove move constructor and assignment, let the compiler generate
    them (needs ARMA_USE_CXX11 to be defined)


>---------------------------------------------------------------

99a6ada43f6be93ef85205a07bb14e3508fba050
 src/mlpack/methods/ann/layer/sparse_bias_layer.hpp   | 14 --------------
 src/mlpack/methods/ann/layer/sparse_input_layer.hpp  | 15 ---------------
 src/mlpack/methods/ann/layer/sparse_output_layer.hpp | 17 -----------------
 3 files changed, 46 deletions(-)

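For context, a minimal sketch of why the hand-written move operations become redundant (hypothetical ExampleLayer below, not the actual mlpack class): once ARMA_USE_CXX11 is defined, arma::mat itself provides move semantics, so the compiler-generated move constructor and move assignment member-wise move the weight matrix without a deep copy, matching what the removed code did by hand.

// Sketch only: illustrates the rule-of-zero idea behind this commit.
#define ARMA_USE_CXX11   // enable Armadillo's C++11 features, including moves
#include <armadillo>
#include <utility>

class ExampleLayer
{
 public:
  explicit ExampleLayer(const size_t outSize) : outSize(outSize)
  {
    weights.set_size(outSize, 1);
  }

  // No user-declared move constructor or move assignment operator:
  // a user-declared (non-special) constructor does not suppress them,
  // so the compiler generates move operations that move outSize and weights.

 private:
  size_t outSize;
  arma::mat weights;
};

int main()
{
  ExampleLayer a(10);
  ExampleLayer b(std::move(a)); // uses the implicitly generated move constructor
}
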
diff --git a/src/mlpack/methods/ann/layer/sparse_bias_layer.hpp b/src/mlpack/methods/ann/layer/sparse_bias_layer.hpp
index 2723a98..8fc33c3 100644
--- a/src/mlpack/methods/ann/layer/sparse_bias_layer.hpp
+++ b/src/mlpack/methods/ann/layer/sparse_bias_layer.hpp
@@ -44,20 +44,6 @@ class SparseBiasLayer
     weights.set_size(outSize, 1);
   }  
 
-  SparseBiasLayer(SparseBiasLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  SparseBiasLayer& operator=(SparseBiasLayer &&layer) noexcept
-  {
-    outSize = layer.outSize;
-    batchSize = layer.batchSize;
-    weights.swap(layer.weights);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
diff --git a/src/mlpack/methods/ann/layer/sparse_input_layer.hpp b/src/mlpack/methods/ann/layer/sparse_input_layer.hpp
index efdb777..9b5f274 100644
--- a/src/mlpack/methods/ann/layer/sparse_input_layer.hpp
+++ b/src/mlpack/methods/ann/layer/sparse_input_layer.hpp
@@ -49,21 +49,6 @@ class SparseInputLayer
     weights.set_size(outSize, inSize);
   }  
 
-  SparseInputLayer(SparseInputLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  SparseInputLayer& operator=(SparseInputLayer &&layer) noexcept
-  {
-    inSize = layer.inSize;
-    outSize = layer.outSize;
-    lambda = layer.lambda;
-    weights.swap(layer.weights);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
diff --git a/src/mlpack/methods/ann/layer/sparse_output_layer.hpp b/src/mlpack/methods/ann/layer/sparse_output_layer.hpp
index 950081a..ae1597f 100644
--- a/src/mlpack/methods/ann/layer/sparse_output_layer.hpp
+++ b/src/mlpack/methods/ann/layer/sparse_output_layer.hpp
@@ -49,23 +49,6 @@ class SparseOutputLayer
     weights.set_size(outSize, inSize);
   }
 
-  SparseOutputLayer(SparseOutputLayer &&layer) noexcept
-  {
-    *this = std::move(layer);
-  }
-
-  SparseOutputLayer& operator=(SparseOutputLayer &&layer) noexcept
-  {
-    beta = layer.beta;
-    rho = layer.rho;
-    lambda = layer.lambda;
-    inSize = layer.inSize;
-    outSize = layer.outSize;
-    weights.swap(layer.weights);
-
-    return *this;
-  }
-
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
