[mlpack-git] master: Refactor the bias, softmax, and LSTM layers to work with the convolutional neural network changes. (6644370)

gitdub at big.cc.gt.atl.ga.us
Thu Jun 4 04:47:10 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/2f479f388ee3d34e4a20535c3662b1921a4c6c06...7fb32130bd683cf03a853ea2bc6960e80d625955

>---------------------------------------------------------------

commit 66443701113ccbb94202f4d670aecdd6ba0f86d8
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Wed Jun 3 11:04:02 2015 +0200

    Refactor the bias, softmax, and LSTM layers to work with the convolutional neural network changes.


>---------------------------------------------------------------

66443701113ccbb94202f4d670aecdd6ba0f86d8
 src/mlpack/methods/ann/layer/bias_layer.hpp    | 90 ++++++++++++++++----------
 src/mlpack/methods/ann/layer/lstm_layer.hpp    | 20 ++++--
 src/mlpack/methods/ann/layer/softmax_layer.hpp | 19 +++---
 3 files changed, 79 insertions(+), 50 deletions(-)
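
In short, the refactor collapses the two data-type template parameters of BiasLayer into a single DataType parameter and gives each layer the geometry accessors (LayerRows(), LayerCols(), LayerSlices(), OutputMaps()) that the convolutional network code queries. Paraphrasing the diff below, the declaration changes roughly like this:

    // Before: separate matrix and vector types.
    template<class ActivationFunction = IdentityFunction,
             typename MatType = arma::mat,
             typename VecType = arma::colvec>
    class BiasLayer;

    // After: one data type covers both uses.
    template<class ActivationFunction = IdentityFunction,
             typename DataType = arma::colvec>
    class BiasLayer;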

diff --git a/src/mlpack/methods/ann/layer/bias_layer.hpp b/src/mlpack/methods/ann/layer/bias_layer.hpp
index 90da231..60352a8 100644
--- a/src/mlpack/methods/ann/layer/bias_layer.hpp
+++ b/src/mlpack/methods/ann/layer/bias_layer.hpp
@@ -5,8 +5,8 @@
  * Definition of the BiasLayer class, which implements a standard bias
  * layer.
  */
-#ifndef __MLPACK_METHOS_ANN_LAYER_BIAS_LAYER_HPP
-#define __MLPACK_METHOS_ANN_LAYER_BIAS_LAYER_HPP
+#ifndef __MLPACK_METHODS_ANN_LAYER_BIAS_LAYER_HPP
+#define __MLPACK_METHODS_ANN_LAYER_BIAS_LAYER_HPP
 
 #include <mlpack/core.hpp>
 #include <mlpack/methods/ann/layer/layer_traits.hpp>
@@ -20,13 +20,11 @@ namespace ann /** Artificial Neural Network. */ {
  *
  * @tparam ActivationFunction Activation function used for the bias layer
  * (Default IdentityFunction).
- * @tparam MatType Type of data (arma::mat or arma::sp_mat).
- * @tparam VecType Type of data (arma::colvec, arma::mat or arma::sp_mat).
+ * @tparam DataType Type of data (arma::colvec, arma::mat or arma::sp_mat).
  */
 template <
     class ActivationFunction = IdentityFunction,
-    typename MatType = arma::mat,
-    typename VecType = arma::colvec
+    typename DataType = arma::colvec
 >
 class BiasLayer
 
@@ -38,9 +36,12 @@ class BiasLayer
    * @param layerSize The number of neurons.
    */
   BiasLayer(const size_t layerSize) :
-      inputActivations(arma::ones<VecType>(layerSize)),
-      delta(arma::zeros<VecType>(layerSize)),
-      layerSize(layerSize)
+      inputActivations(arma::ones<DataType>(layerSize)),
+      delta(arma::zeros<DataType>(layerSize)),
+      layerRows(layerSize),
+      layerCols(1),
+      layerSlices(1),
+      outputMaps(1)
   {
     // Nothing to do here.
   }
@@ -53,7 +54,7 @@ class BiasLayer
    * activity function.
    * @param outputActivation Data to store the resulting output activation.
    */
-  void FeedForward(const VecType& inputActivation, VecType& outputActivation)
+  void FeedForward(const DataType& inputActivation, DataType& outputActivation)
   {
     ActivationFunction::fn(inputActivation, outputActivation);
   }
@@ -68,58 +69,77 @@ class BiasLayer
   * @param delta The calculated delta, using the partial derivative of the
   * error with respect to the weight.
    */
-  void FeedBackward(const VecType& inputActivation,
-                    const VecType& error,
-                    VecType& delta)
+  void FeedBackward(const DataType& inputActivation,
+                    const DataType& error,
+                    DataType& delta)
   {
-    VecType derivative;
+    DataType derivative;
     ActivationFunction::deriv(inputActivation, derivative);
 
     delta = error % derivative;
   }
 
   //! Get the input activations.
-  const VecType& InputActivation() const { return inputActivations; }
-  //  //! Modify the input activations.
-  VecType& InputActivation() { return inputActivations; }
+  const DataType& InputActivation() const { return inputActivations; }
+  //! Modify the input activations.
+  DataType& InputActivation() { return inputActivations; }
 
   //! Get the delta.
-  VecType& Delta() const { return delta; }
- //  //! Modify the delta.
-  VecType& Delta() { return delta; }
+  const DataType& Delta() const { return delta; }
+  //! Modify the delta.
+  DataType& Delta() { return delta; }
 
   //! Get input size.
-  size_t InputSize() const { return layerSize; }
-  //  //! Modify the delta.
-  size_t& InputSize() { return layerSize; }
+  size_t InputSize() const { return layerRows; }
+  //! Modify the input size.
+  size_t& InputSize() { return layerRows; }
 
   //! Get output size.
-  size_t OutputSize() const { return layerSize; }
+  size_t OutputSize() const { return layerRows; }
   //! Modify the output size.
-  size_t& OutputSize() { return layerSize; }
+  size_t& OutputSize() { return layerRows; }
+
+  //! Get the number of layer rows.
+  size_t LayerRows() const { return layerRows; }
+  //! Modify the number of layer rows.
+  size_t& LayerRows() { return layerRows; }
+
+  //! Get the number of layer columns.
+  size_t LayerCols() const { return layerCols; }
+  //! Modify the number of layer columns.
+  size_t& LayerCols() { return layerCols; }
 
   //! Get the number of layer slices.
-  size_t LayerSlices() const { return 1; }
+  size_t LayerSlices() const { return layerSlices; }
+
+  //! Get the number of output maps.
+  size_t OutputMaps() const { return outputMaps; }
 
  private:
   //! Locally-stored input activation object.
-  VecType inputActivations;
+  DataType inputActivations;
 
   //! Locally-stored delta object.
-  VecType delta;
+  DataType delta;
+
+  //! Locally-stored number of layer rows.
+  size_t layerRows;
 
-  //! Locally-stored number of neurons.
-  size_t layerSize;
+  //! Locally-stored number of layer cols.
+  size_t layerCols;
+
+  //! Locally-stored number of layer slices.
+  size_t layerSlices;
+
+  //! Locally-stored number of output maps.
+  size_t outputMaps;
 }; // class BiasLayer
 
 //! Layer traits for the bias layer.
-template<typename ActivationFunction, typename MatType, typename VecType>
-class LayerTraits<BiasLayer<ActivationFunction, MatType, VecType> >
+template<typename ActivationFunction, typename DataType>
+class LayerTraits<BiasLayer<ActivationFunction, DataType> >
 {
  public:
-  /**
-   * If true, then the layer is binary.
-   */
   static const bool IsBinary = false;
   static const bool IsOutputLayer = false;
   static const bool IsBiasLayer = true;
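
For context, a caller might use the refactored layer like this; a minimal sketch only, assuming the usual mlpack headers and that DataType keeps its arma::colvec default:

    #include <mlpack/core.hpp>
    #include <mlpack/methods/ann/layer/bias_layer.hpp>

    using namespace mlpack::ann;

    // A bias layer with 10 neurons; the constructor seeds the input
    // activations with ones and the delta with zeros.
    BiasLayer<> biasLayer(10);

    // The new accessors report the layer geometry the convolutional
    // network code expects: a 10x1 layer with one slice and one map.
    const size_t rows   = biasLayer.LayerRows();   // 10
    const size_t cols   = biasLayer.LayerCols();   // 1
    const size_t slices = biasLayer.LayerSlices(); // 1
    const size_t maps   = biasLayer.OutputMaps();  // 1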
diff --git a/src/mlpack/methods/ann/layer/lstm_layer.hpp b/src/mlpack/methods/ann/layer/lstm_layer.hpp
index 882d781..2a4a482 100644
--- a/src/mlpack/methods/ann/layer/lstm_layer.hpp
+++ b/src/mlpack/methods/ann/layer/lstm_layer.hpp
@@ -5,8 +5,8 @@
  * Definition of the LSTMLayer class, which implements an LSTM network
  * layer.
  */
-#ifndef __MLPACK_METHOS_ANN_LAYER_LSTM_LAYER_HPP
-#define __MLPACK_METHOS_ANN_LAYER_LSTM_LAYER_HPP
+#ifndef __MLPACK_METHODS_ANN_LAYER_LSTM_LAYER_HPP
+#define __MLPACK_METHODS_ANN_LAYER_LSTM_LAYER_HPP
 
 #include <mlpack/core.hpp>
 #include <mlpack/methods/ann/layer/layer_traits.hpp>
@@ -303,7 +303,7 @@ class LSTMLayer
 
   //! Get the input activations.
   const VecType& InputActivation() const { return inputActivations; }
- //  //! Modify the input activations.
+  //! Modify the input activations.
   VecType& InputActivation() { return inputActivations; }
 
   //! Get input size.
@@ -314,12 +314,21 @@ class LSTMLayer
   //! Modify the output size.
   size_t& OutputSize() { return layerSize; }
 
+  //! Get the number of output maps.
+  size_t OutputMaps() const { return 1; }
+
   //! Get the number of layer slices.
   size_t LayerSlices() const { return 1; }
 
+  //! Get the number of layer rows.
+  size_t LayerRows() const { return layerSize; }
+
+  //! Get the number of layer columns.
+  size_t LayerCols() const { return 1; }
+
   //! Get the delta.
   VecType& Delta() const { return delta; }
- //  //! Modify the delta.
+  //! Modify the delta.
   VecType& Delta() { return delta; }
 
   //! Get the sequence length.
@@ -456,9 +465,6 @@ class LayerTraits<
 >
 {
  public:
-  /**
-   * If true, then the layer is binary.
-   */
   static const bool IsBinary = false;
   static const bool IsOutputLayer = false;
   static const bool IsBiasLayer = false;
diff --git a/src/mlpack/methods/ann/layer/softmax_layer.hpp b/src/mlpack/methods/ann/layer/softmax_layer.hpp
index 7d0672d..44e26e3 100644
--- a/src/mlpack/methods/ann/layer/softmax_layer.hpp
+++ b/src/mlpack/methods/ann/layer/softmax_layer.hpp
@@ -2,11 +2,11 @@
  * @file softmax_layer.hpp
  * @author Marcus Edel
  *
- * Definition of the SoftmaxLayer class, which implements a standard network
- * layer.
+ * Definition of the SoftmaxLayer class, which implements a standard softmax
+ * network layer.
  */
-#ifndef __MLPACK_METHOS_ANN_LAYER_SOFTMAX_LAYER_HPP
-#define __MLPACK_METHOS_ANN_LAYER_SOFTMAX_LAYER_HPP
+#ifndef __MLPACK_METHODS_ANN_LAYER_SOFTMAX_LAYER_HPP
+#define __MLPACK_METHODS_ANN_LAYER_SOFTMAX_LAYER_HPP
 
 #include <mlpack/core.hpp>
 
@@ -70,17 +70,17 @@ class SoftmaxLayer
 
   //! Get the input activations.
   VecType& InputActivation() const { return inputActivations; }
-  //  //! Modify the input activations.
+  //! Modify the input activations.
   VecType& InputActivation() { return inputActivations; }
 
   //! Get the delta.
   VecType& Delta() const { return delta; }
- //  //! Modify the delta.
+  //! Modify the delta.
   VecType& Delta() { return delta; }
 
   //! Get input size.
   size_t InputSize() const { return layerSize; }
-  //  //! Modify the delta.
+  //! Modify the input size.
   size_t& InputSize() { return layerSize; }
 
   //! Get output size.
@@ -88,9 +88,12 @@ class SoftmaxLayer
   //! Modify the output size.
   size_t& OutputSize() { return layerSize; }
 
-    //! Get the number of layer slices.
+  //! Get the number of layer slices.
   size_t LayerSlices() const { return 1; }
 
+  //! Get the number of output maps.
+  size_t OutputMaps() const { return 1; }
+
  private:
   //! Locally-stored input activation object.
   VecType inputActivations;
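
The payoff of exposing the same four accessors on every layer is that the network code can size its buffers without knowing the concrete layer type. The helper below is illustrative only, not mlpack API:

    // Illustrative: build an activation cube from the geometry any of the
    // refactored layers now reports.
    template<typename LayerType>
    arma::cube ActivationCube(const LayerType& layer)
    {
      return arma::cube(layer.LayerRows(),
                        layer.LayerCols(),
                        layer.LayerSlices() * layer.OutputMaps());
    }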



More information about the mlpack-git mailing list