[mlpack-git] master: add move constructor and move assignment (01d5c4b)
gitdub at big.cc.gt.atl.ga.us
Thu Dec 31 12:19:00 EST 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/ea908deb6ae205b99ae8ba063b716c1bd726babd...29c0405173902f366d241e19dabfe3d679f8bea8
>---------------------------------------------------------------
commit 01d5c4b77852c44c3a1d3a0c8e9f26106d8497ce
Author: stereomatchingkiss <stereomatchingkiss at gmail.com>
Date: Fri Dec 11 17:29:46 2015 +0800
add move constructor and move assignment
>---------------------------------------------------------------
01d5c4b77852c44c3a1d3a0c8e9f26106d8497ce
src/mlpack/methods/ann/layer/conv_layer.hpp | 49 +++++++++++++++++++-----
src/mlpack/methods/ann/layer/dropout_layer.hpp | 23 ++++++++++-
src/mlpack/methods/ann/layer/lstm_layer.hpp | 47 +++++++++++++++++++++--
src/mlpack/methods/ann/layer/pooling_layer.hpp | 20 +++++++++-
src/mlpack/methods/ann/layer/recurrent_layer.hpp | 32 ++++++++++++++--
src/mlpack/methods/ann/layer/softmax_layer.hpp | 18 ++++++++-
6 files changed, 165 insertions(+), 24 deletions(-)
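
All six layers follow the same pattern: the move constructor delegates to the move assignment operator, which copies the plain scalar members, swaps the Armadillo matrices (an O(1) exchange of internal buffers rather than an element-wise copy), and, for the layers that own an optimizer, steals the raw pointer and nulls it out in the source so the moved-from destructor cannot delete it a second time. A minimal self-contained sketch of the pattern follows; Layer and Optimizer are stand-ins rather than mlpack types, and unlike the diff the sketch also guards against self-move and frees a previously owned optimizer:

    #include <cstddef>
    #include <utility>
    #include <armadillo>

    struct Optimizer { /* stand-in for the layer's OptimizerType */ };

    class Layer
    {
     public:
      explicit Layer(const size_t outSize) :
          outSize(outSize), optimizer(new Optimizer()), ownsOptimizer(true) { }

      // Move constructor: delegate every member transfer to operator=.
      Layer(Layer&& layer) noexcept { *this = std::move(layer); }

      Layer& operator=(Layer&& layer) noexcept
      {
        if (this == &layer)
          return *this;

        // Release anything currently owned, then steal the source's
        // pointer and disarm it to prevent a double delete.
        if (ownsOptimizer)
          delete optimizer;
        optimizer = layer.optimizer;
        ownsOptimizer = layer.ownsOptimizer;
        layer.optimizer = nullptr;
        layer.ownsOptimizer = false;

        // Scalars are cheap to copy; matrices change hands via swap().
        outSize = layer.outSize;
        weights.swap(layer.weights);

        return *this;
      }

      ~Layer() { if (ownsOptimizer) delete optimizer; }

     private:
      size_t outSize;
      arma::mat weights;
      Optimizer* optimizer = nullptr;  // null in the moved-from state
      bool ownsOptimizer = false;
    };
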
diff --git a/src/mlpack/methods/ann/layer/conv_layer.hpp b/src/mlpack/methods/ann/layer/conv_layer.hpp
index 506d567..1250984 100644
--- a/src/mlpack/methods/ann/layer/conv_layer.hpp
+++ b/src/mlpack/methods/ann/layer/conv_layer.hpp
@@ -88,6 +88,35 @@ class ConvLayer
weightInitRule.Initialize(weights, wfilter, hfilter, inMaps * outMaps);
}
+ ConvLayer(ConvLayer &&layer) noexcept
+ {
+ *this = std::move(layer);
+ }
+
+ ConvLayer& operator=(ConvLayer &&layer) noexcept
+ {
+ optimizer = layer.optimizer;
+ ownsOptimizer = layer.ownsOptimizer;
+ layer.optimizer = nullptr;
+ layer.ownsOptimizer = false;
+
+ wfilter = layer.wfilter;
+ hfilter = layer.hfilter;
+ inMaps = layer.inMaps;
+ outMaps = layer.outMaps;
+ xStride = layer.xStride;
+ yStride = layer.yStride;
+ wPad = layer.wPad;
+ hPad = layer.hPad;
+ weights.swap(layer.weights);
+ delta.swap(layer.delta);
+ gradient.swap(layer.gradient);
+ inputParameter.swap(layer.inputParameter);
+ outputParameter.swap(layer.outputParameter);
+
+ return *this;
+ }
+
/**
* Delete the convolution layer object and its optimizer.
*/
@@ -282,28 +311,28 @@ class ConvLayer
}
//! Locally-stored filter/kernel width.
- const size_t wfilter;
+ size_t wfilter;
//! Locally-stored filter/kernel height.
- const size_t hfilter;
+ size_t hfilter;
//! Locally-stored number of input maps.
- const size_t inMaps;
+ size_t inMaps;
//! Locally-stored number of output maps.
- const size_t outMaps;
+ size_t outMaps;
//! Locally-stored stride of the filter in x-direction.
- const size_t xStride;
+ size_t xStride;
//! Locally-stored stride of the filter in y-direction.
- const size_t yStride;
+ size_t yStride;
//! Locally-stored padding width.
- const size_t wPad;
+ size_t wPad;
//! Locally-stored padding height.
- const size_t hPad;
+ size_t hPad;
//! Locally-stored weight object.
OutputDataType weights;
@@ -359,7 +388,7 @@ class LayerTraits<ConvLayer<OptimizerType,
static const bool IsConnection = true;
};
-} // namespace ann
-} // namespace mlpack
+}; // namespace ann
+}; // namespace mlpack
#endif
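
The second set of hunks, which drops const from wfilter, hfilter, and the other configuration members, is a prerequisite for the move assignment above: a const data member can only be set in a constructor's initializer list, and any class containing one gets its copy and move assignment operators implicitly deleted. A short illustration with a hypothetical struct:

    #include <cstddef>
    #include <utility>

    struct Config { const size_t width; };

    int main()
    {
      Config a{3}, b{5};
      // a = b;            // error: copy assignment implicitly deleted
      // a = std::move(b); // error: move assignment implicitly deleted
      return 0;
    }
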
diff --git a/src/mlpack/methods/ann/layer/dropout_layer.hpp b/src/mlpack/methods/ann/layer/dropout_layer.hpp
index 7bb3a0a..2c8dc96 100644
--- a/src/mlpack/methods/ann/layer/dropout_layer.hpp
+++ b/src/mlpack/methods/ann/layer/dropout_layer.hpp
@@ -66,6 +66,25 @@ class DropoutLayer
// Nothing to do here.
}
+ DropoutLayer(DropoutLayer &&layer) noexcept
+ {
+ *this = std::move(layer);
+ }
+
+ DropoutLayer& operator=(DropoutLayer &&layer) noexcept
+ {
+ delta.swap(layer.delta);
+ inputParameter.swap(layer.inputParameter);
+ outputParameter.swap(layer.outputParameter);
+ mask.swap(layer.mask);
+ ratio = layer.ratio;
+ scale = layer.scale;
+ deterministic = layer.deterministic;
+ rescale = layer.rescale;
+
+ return *this;
+ }
+
/**
* Ordinary feed forward pass of the dropout layer.
*
@@ -231,7 +250,7 @@ template <
>
using DropoutLayer2D = DropoutLayer<InputDataType, OutputDataType>;
-} // namespace ann
-} // namespace mlpack
+}; // namespace ann
+}; // namespace mlpack
#endif
diff --git a/src/mlpack/methods/ann/layer/lstm_layer.hpp b/src/mlpack/methods/ann/layer/lstm_layer.hpp
index 2eabe78..8cffae1 100644
--- a/src/mlpack/methods/ann/layer/lstm_layer.hpp
+++ b/src/mlpack/methods/ann/layer/lstm_layer.hpp
@@ -83,6 +83,45 @@ class LSTMLayer
}
}
+ LSTMLayer(LSTMLayer &&layer) noexcept
+ {
+ *this = std::move(layer);
+ }
+
+ LSTMLayer& operator=(LSTMLayer &&layer) noexcept
+ {
+ optimizer = layer.optimizer;
+ ownsOptimizer = layer.ownsOptimizer;
+ layer.optimizer = nullptr;
+ layer.ownsOptimizer = false;
+
+ outSize = layer.outSize;
+ peepholes = layer.peepholes;
+ seqLen = layer.seqLen;
+ offset = layer.offset;
+ delta.swap(layer.delta);
+ gradient.swap(layer.gradient);
+ inputParameter.swap(layer.inputParameter);
+ outputParameter.swap(layer.outputParameter);
+ inGate.swap(layer.inGate);
+ inGateAct.swap(layer.inGateAct);
+ inGateError.swap(layer.inGateError);
+ outGate.swap(layer.outGate);
+ outGateAct.swap(layer.outGateAct);
+ outGateError.swap(layer.outGateError);
+ forgetGate.swap(layer.forgetGate);
+ forgetGateAct.swap(layer.forgetGateAct);
+ forgetGateError.swap(layer.forgetGateError);
+ state.swap(layer.state);
+ stateError.swap(layer.stateError);
+ cellAct.swap(layer.cellAct);
+ peepholeWeights.swap(layer.peepholeWeights);
+ peepholeDerivatives.swap(layer.peepholeDerivatives);
+ peepholeGradient.swap(layer.peepholeGradient);
+
+ return *this;
+ }
+
/**
* Delete the LSTMLayer object and its optimizer.
*/
@@ -309,10 +348,10 @@ class LSTMLayer
private:
//! Locally-stored number of output units.
- const size_t outSize;
+ size_t outSize;
//! Locally-stored peephole indication flag.
- const bool peepholes;
+ bool peepholes;
//! Locally-stored length of the input sequence.
size_t seqLen;
@@ -419,7 +458,7 @@ class LayerTraits<LSTMLayer<OptimizerType,
static const bool IsConnection = false;
};
-} // namespace ann
-} // namespace mlpack
+}; // namespace ann
+}; // namespace mlpack
#endif
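
One consequence of this scheme is the state of the moved-from layer: its matrices hold whatever the destination held before the swap (typically empty ones), and its optimizer pointer is null, so destroying it is safe even though using it further is not. Reusing the generic Layer sketch from above:

    #include <utility>
    #include <vector>

    int main()
    {
      Layer a(10);            // owns an optimizer and a weight matrix
      Layer b(std::move(a));  // b steals both; a now owns nothing

      // a may be destroyed or assigned to, but should not be used:
      // its optimizer pointer is null and its matrices were swapped away.

      std::vector<Layer> layers;
      layers.push_back(std::move(b));  // no deep copy of the weights
      return 0;
    }
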
diff --git a/src/mlpack/methods/ann/layer/pooling_layer.hpp b/src/mlpack/methods/ann/layer/pooling_layer.hpp
index f5b7777..69455c8 100644
--- a/src/mlpack/methods/ann/layer/pooling_layer.hpp
+++ b/src/mlpack/methods/ann/layer/pooling_layer.hpp
@@ -45,6 +45,22 @@ class PoolingLayer
// Nothing to do here.
}
+ PoolingLayer(PoolingLayer &&layer) noexcept
+ {
+ *this = std::move(layer);
+ }
+
+ PoolingLayer& operator=(PoolingLayer &&layer) noexcept
+ {
+ kSize = layer.kSize;
+ delta.swap(layer.delta);
+ inputParameter.swap(layer.inputParameter);
+ outputParameter.swap(layer.outputParameter);
+ pooling = std::move(layer.pooling);
+
+ return *this;
+ }
+
/**
* Ordinary feed forward pass of a neural network, evaluating the function
* f(x) by propagating the activity forward through f.
@@ -235,7 +251,7 @@ class LayerTraits<PoolingLayer<PoolingRule, InputDataType, OutputDataType> >
};
-} // namespace ann
-} // namespace mlpack
+}; // namespace ann
+}; // namespace mlpack
#endif
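
PoolingLayer is the only layer here that transfers a member with move assignment (pooling = std::move(layer.pooling)) instead of swap(). The observable difference is confined to the moved-from object: swap leaves it holding the destination's old value, while move assignment leaves it in whatever moved-from state the member's type defines. A std::string stand-in makes the contrast concrete:

    #include <string>
    #include <utility>

    int main()
    {
      std::string dst = "old", src = "new";
      dst.swap(src);           // dst == "new", src == "old": values exchanged

      std::string dst2 = "old", src2 = "new";
      dst2 = std::move(src2);  // dst2 == "new"; src2 is valid but unspecified
      return 0;
    }
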
diff --git a/src/mlpack/methods/ann/layer/recurrent_layer.hpp b/src/mlpack/methods/ann/layer/recurrent_layer.hpp
index d472fe6..42859f5 100644
--- a/src/mlpack/methods/ann/layer/recurrent_layer.hpp
+++ b/src/mlpack/methods/ann/layer/recurrent_layer.hpp
@@ -82,6 +82,30 @@ class RecurrentLayer
weightInitRule.Initialize(weights, outSize, inSize);
}
+ RecurrentLayer(RecurrentLayer &&layer) noexcept
+ {
+ *this = std::move(layer);
+ }
+
+ RecurrentLayer& operator=(RecurrentLayer &&layer) noexcept
+ {
+ optimizer = layer.optimizer;
+ ownsOptimizer = layer.ownsOptimizer;
+ layer.optimizer = nullptr;
+ layer.ownsOptimizer = false;
+
+ inSize = layer.inSize;
+ outSize = layer.outSize;
+ weights.swap(layer.weights);
+ delta.swap(layer.delta);
+ gradient.swap(layer.gradient);
+ inputParameter.swap(layer.inputParameter);
+ outputParameter.swap(layer.outputParameter);
+ recurrentParameter.swap(layer.recurrentParameter);
+
+ return *this;
+ }
+
/**
* Delete the RecurrentLayer object and its optimizer.
*/
@@ -183,10 +207,10 @@ class RecurrentLayer
private:
//! Locally-stored number of input units.
- const size_t inSize;
+ size_t inSize;
//! Locally-stored number of output units.
- const size_t outSize;
+ size_t outSize;
//! Locally-stored weight object.
OutputDataType weights;
@@ -234,7 +258,7 @@ class LayerTraits<RecurrentLayer<
static const bool IsConnection = true;
};
-} // namespace ann
-} // namespace mlpack
+}; // namespace ann
+}; // namespace mlpack
#endif
diff --git a/src/mlpack/methods/ann/layer/softmax_layer.hpp b/src/mlpack/methods/ann/layer/softmax_layer.hpp
index 655e196..490daa5 100644
--- a/src/mlpack/methods/ann/layer/softmax_layer.hpp
+++ b/src/mlpack/methods/ann/layer/softmax_layer.hpp
@@ -36,6 +36,20 @@ class SoftmaxLayer
// Nothing to do here.
}
+ SoftmaxLayer(SoftmaxLayer &&layer) noexcept
+ {
+ *this = std::move(layer);
+ }
+
+ SoftmaxLayer& operator=(SoftmaxLayer &&layer) noexcept
+ {
+ delta.swap(layer.delta);
+ inputParameter.swap(layer.inputParameter);
+ outputParameter.swap(layer.outputParameter);
+
+ return *this;
+ }
+
/**
* Ordinary feed forward pass of a neural network, evaluating the function
* f(x) by propagating the activity forward through f.
@@ -94,7 +108,7 @@ class SoftmaxLayer
OutputDataType outputParameter;
}; // class SoftmaxLayer
-} // namespace ann
-} // namespace mlpack
+}; // namespace ann
+}; // namespace mlpack
#endif
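
Finally, the noexcept on every move member is not cosmetic: standard containers such as std::vector only move elements during reallocation when the move constructor is declared non-throwing (they select it through std::move_if_noexcept), and fall back to copying otherwise. The property can be checked at compile time; the instantiation below assumes SoftmaxLayer's default template arguments and is illustrative rather than taken from the commit:

    #include <type_traits>
    #include <mlpack/methods/ann/layer/softmax_layer.hpp>

    using Softmax = mlpack::ann::SoftmaxLayer<>;

    static_assert(std::is_nothrow_move_constructible<Softmax>::value,
                  "SoftmaxLayer should be nothrow move constructible");
    static_assert(std::is_nothrow_move_assignable<Softmax>::value,
                  "SoftmaxLayer should be nothrow move assignable");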