[mlpack-git] master: add move constructor and move assignment (cb0b5e5)

gitdub at big.cc.gt.atl.ga.us
Thu Dec 31 12:18:58 EST 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/ea908deb6ae205b99ae8ba063b716c1bd726babd...29c0405173902f366d241e19dabfe3d679f8bea8

>---------------------------------------------------------------

commit cb0b5e5227de46c71b91d2bfbbd00b47467c07ce
Author: stereomatchingkiss <stereomatchingkiss at gmail.com>
Date:   Thu Dec 10 19:48:41 2015 +0800

    add move constructor and move assignment


>---------------------------------------------------------------

cb0b5e5227de46c71b91d2bfbbd00b47467c07ce
 src/mlpack/methods/ann/layer/base_layer.hpp   | 14 ++++++++++++++
 src/mlpack/methods/ann/layer/bias_layer.hpp   | 25 ++++++++++++++++++++++++-
 src/mlpack/methods/ann/layer/linear_layer.hpp | 27 +++++++++++++++++++++++++--
 3 files changed, 63 insertions(+), 3 deletions(-)

diff --git a/src/mlpack/methods/ann/layer/base_layer.hpp b/src/mlpack/methods/ann/layer/base_layer.hpp
index 12f608e..d364c96 100644
--- a/src/mlpack/methods/ann/layer/base_layer.hpp
+++ b/src/mlpack/methods/ann/layer/base_layer.hpp
@@ -51,6 +51,20 @@ class BaseLayer
     // Nothing to do here.
   }
   
+  BaseLayer(BaseLayer &&layer) noexcept
+  {
+    *this = std::move(layer);
+  }
+
+  BaseLayer& operator=(BaseLayer &&layer) noexcept
+  {
+    delta.swap(layer.delta);
+    inputParameter.swap(layer.inputParameter);
+    outputParameter.swap(layer.outputParameter);
+
+    return *this;
+  }
+
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
    * f(x) by propagating the activity forward through f.
diff --git a/src/mlpack/methods/ann/layer/bias_layer.hpp b/src/mlpack/methods/ann/layer/bias_layer.hpp
index a762e26..59061d9 100644
--- a/src/mlpack/methods/ann/layer/bias_layer.hpp
+++ b/src/mlpack/methods/ann/layer/bias_layer.hpp
@@ -63,6 +63,29 @@ class BiasLayer
     weightInitRule.Initialize(weights, outSize, 1);
   }
   
+  BiasLayer(BiasLayer &&layer) noexcept
+  {
+    *this = std::move(layer);
+  }
+
+  BiasLayer& operator=(BiasLayer &&layer) noexcept
+  {
+    optimizer = layer.optimizer;
+    layer.optimizer = nullptr;
+    ownsOptimizer = layer.ownsOptimizer;
+    layer.ownsOptimizer = false;
+
+    outSize = layer.outSize;
+    bias = layer.bias;
+    weights.swap(layer.weights);
+    delta.swap(layer.delta);
+    gradient.swap(layer.gradient);
+    inputParameter.swap(layer.inputParameter);
+    outputParameter.swap(layer.outputParameter);
+
+    return *this;
+  }
+
   /**
    * Delete the bias layer object and its optimizer.
    */
@@ -191,7 +214,7 @@ class BiasLayer
 
  private:
   //! Locally-stored number of output units.
-  const size_t outSize;
+  size_t outSize;
 
   //! Locally-stored bias value.
   double bias;
diff --git a/src/mlpack/methods/ann/layer/linear_layer.hpp b/src/mlpack/methods/ann/layer/linear_layer.hpp
index 8537b77..a556862 100644
--- a/src/mlpack/methods/ann/layer/linear_layer.hpp
+++ b/src/mlpack/methods/ann/layer/linear_layer.hpp
@@ -59,6 +59,29 @@ class LinearLayer
     weightInitRule.Initialize(weights, outSize, inSize);
   }
   
+  LinearLayer(LinearLayer &&layer) noexcept
+  {
+    *this = std::move(layer);
+  }
+
+  LinearLayer& operator=(LinearLayer &&layer) noexcept
+  {
+    ownsOptimizer = layer.ownsOptimizer;
+    layer.ownsOptimizer = false;
+    optimizer = layer.optimizer;
+    layer.optimizer = nullptr;
+
+    inSize = layer.inSize;
+    outSize = layer.outSize;
+    weights.swap(layer.weights);
+    delta.swap(layer.delta);
+    gradient.swap(layer.gradient);
+    inputParameter.swap(layer.inputParameter);
+    outputParameter.swap(layer.outputParameter);
+
+    return *this;
+  }
+
   /**
    * Delete the linear layer object and its optimizer.
    */
@@ -262,10 +285,10 @@ class LinearLayer
   }
 
   //! Locally-stored number of input units.
-  const size_t inSize;
+  size_t inSize;
 
   //! Locally-stored number of output units.
-  const size_t outSize;
+  size_t outSize;
 
   //! Locally-stored weight object.
   OutputDataType weights;
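
A note on the idiom used in all three layers above: the move constructor
delegates to the move-assignment operator, so the member-transfer logic
lives in one place, and the Armadillo matrices are transferred with
swap(), which exchanges buffers without copying any elements. A minimal
self-contained sketch of the same pattern (ExampleLayer and its members
are illustrative names, not mlpack types):

#include <armadillo>
#include <utility>
#include <vector>

class ExampleLayer
{
 public:
  ExampleLayer() = default;

  // Move constructor: delegate to the move-assignment operator.
  ExampleLayer(ExampleLayer&& other) noexcept
  {
    *this = std::move(other);
  }

  // Move assignment: swap the Armadillo buffers; the moved-from object
  // ends up holding this object's (initially empty) matrices.
  ExampleLayer& operator=(ExampleLayer&& other) noexcept
  {
    weights.swap(other.weights);
    delta.swap(other.delta);
    return *this;
  }

 private:
  arma::mat weights;
  arma::mat delta;
};

int main()
{
  ExampleLayer layer;
  std::vector<ExampleLayer> layers;
  layers.push_back(std::move(layer));  // moves; no matrix copy
}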

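BiasLayer and LinearLayer additionally transfer a raw optimizer pointer
together with the ownsOptimizer flag, nulling both out in the source so
that the moved-from destructor cannot double-delete. Dropping const from
inSize/outSize is what makes the classes assignable at all: a class with
const data members has no usable assignment operator. One caveat: as
written, the operators do not release an optimizer that the destination
already owns, so move-assigning onto a fully initialized layer could
leak it. A guarded variant might look like the sketch below (OwningLayer
and Optimizer are hypothetical stand-ins, not mlpack types):

#include <armadillo>
#include <cstddef>
#include <utility>

struct Optimizer { };  // stand-in for the layers' optimizer type

class OwningLayer
{
 public:
  OwningLayer() = default;

  OwningLayer(OwningLayer&& other) noexcept
  {
    *this = std::move(other);
  }

  OwningLayer& operator=(OwningLayer&& other) noexcept
  {
    if (this != &other)
    {
      // Free any optimizer this object already owns before taking over
      // the source's pointer.
      if (ownsOptimizer)
        delete optimizer;

      optimizer = other.optimizer;
      other.optimizer = nullptr;
      ownsOptimizer = other.ownsOptimizer;
      other.ownsOptimizer = false;

      outSize = other.outSize;
      weights.swap(other.weights);
    }
    return *this;
  }

  ~OwningLayer()
  {
    if (ownsOptimizer)
      delete optimizer;
  }

 private:
  Optimizer* optimizer = nullptr;
  bool ownsOptimizer = false;
  std::size_t outSize = 0;
  arma::mat weights;
};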

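Finally, the new operations are declared noexcept, which matters for
standard containers: std::vector only moves its elements during
reallocation when the move constructor cannot throw (it selects via
std::move_if_noexcept) and falls back to copying otherwise. Assuming the
ExampleLayer sketch above, the property can be verified at compile time:

#include <type_traits>

static_assert(std::is_nothrow_move_constructible<ExampleLayer>::value,
              "vector reallocation will move, not copy");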
