[mlpack-svn] master: Add implementation of the NeuronLayer class, which can be used as a basic network layer. (5104bd4)

gitdub at big.cc.gt.atl.ga.us
Fri Jan 2 11:14:07 EST 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/818347e1556f54d5d0c54974758755b53f1608e0...5104bd4c07135b5dab3136c64e113dfb8de5d417

>---------------------------------------------------------------

commit 5104bd4c07135b5dab3136c64e113dfb8de5d417
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Fri Jan 2 17:14:01 2015 +0100

    Add implementation of the NeuronLayer class, which can be used as a basic network layer.


>---------------------------------------------------------------

5104bd4c07135b5dab3136c64e113dfb8de5d417
 .../methods/ann/init_rules/orthogonal_init.hpp     |  2 +-
 .../ann/layer/{bias_layer.hpp => neuron_layer.hpp} | 83 +++++++++++++++-------
 2 files changed, 57 insertions(+), 28 deletions(-)

diff --git a/src/mlpack/methods/ann/init_rules/orthogonal_init.hpp b/src/mlpack/methods/ann/init_rules/orthogonal_init.hpp
index 8764664..41ade2e 100644
--- a/src/mlpack/methods/ann/init_rules/orthogonal_init.hpp
+++ b/src/mlpack/methods/ann/init_rules/orthogonal_init.hpp
@@ -48,7 +48,7 @@ class OrthogonalInitialization
   }
 
  private:
-  //! The number used as lower bound.
+  //! The gain factor used to scale the orthogonal matrix.
   const double gain;
 }; // class OrthogonalInitialization
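
The hunk above only corrects a misleading comment: the gain member of
OrthogonalInitialization scales the orthogonal weight matrix; it is not a
lower bound. As a rough illustration in plain Armadillo (a sketch of the
general technique, not the actual mlpack implementation; the helper name
OrthogonalWeights is made up for this note):

    #include <armadillo>

    // Build a gain-scaled orthogonal weight matrix: take the SVD of a
    // random matrix and recombine the singular vectors, then rescale by
    // gain. Assumes rows >= cols so U has enough columns to slice.
    arma::mat OrthogonalWeights(const arma::uword rows,
                                const arma::uword cols,
                                const double gain)
    {
      arma::mat U, V;
      arma::vec s;
      arma::svd(U, s, V, arma::randu<arma::mat>(rows, cols));
      return gain * U.cols(0, cols - 1) * V.t();
    }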
 
diff --git a/src/mlpack/methods/ann/layer/bias_layer.hpp b/src/mlpack/methods/ann/layer/neuron_layer.hpp
similarity index 58%
copy from src/mlpack/methods/ann/layer/bias_layer.hpp
copy to src/mlpack/methods/ann/layer/neuron_layer.hpp
index dde95dd..4b0105a 100644
--- a/src/mlpack/methods/ann/layer/bias_layer.hpp
+++ b/src/mlpack/methods/ann/layer/neuron_layer.hpp
@@ -1,44 +1,52 @@
 /**
- * @file bias_layer.hpp
+ * @file neuron_layer.hpp
  * @author Marcus Edel
  *
- * Definition of the BiasLayer class, which implements a standard bias
+ * Definition of the NeuronLayer class, which implements a standard network
  * layer.
  */
-#ifndef __MLPACK_METHOS_ANN_LAYER_BIAS_LAYER_HPP
-#define __MLPACK_METHOS_ANN_LAYER_BIAS_LAYER_HPP
+#ifndef __MLPACK_METHODS_ANN_LAYER_NEURON_LAYER_HPP
+#define __MLPACK_METHODS_ANN_LAYER_NEURON_LAYER_HPP
 
 #include <mlpack/core.hpp>
 #include <mlpack/methods/ann/layer/layer_traits.hpp>
-#include <mlpack/methods/ann/activation_functions/identity_function.hpp>
+#include <mlpack/methods/ann/activation_functions/logistic_function.hpp>
+#include <mlpack/methods/ann/activation_functions/rectifier_function.hpp>
 
 namespace mlpack {
 namespace ann /** Artificial Neural Network. */ {
 
 /**
- * An implementation of a standard bias layer with a default value of one.
+ * An implementation of a standard network layer.
  *
- * @tparam ActivationFunction Activation function used for the bias layer
- * (Default IdentityFunction).
+ * This class allows specification of the activation function type.
+ *
+ * A few convenience typedefs are given:
+ *
+ *  - InputLayer
+ *  - HiddenLayer
+ *  - ReluLayer
+ *
+ * @tparam ActivationFunction Activation function used for the neuron layer.
  * @tparam MatType Type of data (arma::mat or arma::sp_mat).
  * @tparam VecType Type of data (arma::colvec, arma::mat or arma::sp_mat).
  */
 template <
-    class ActivationFunction = IdentityFunction,
+    class ActivationFunction = LogisticFunction,
     typename MatType = arma::mat,
     typename VecType = arma::colvec
 >
-class BiasLayer
+class NeuronLayer
 
 {
  public:
   /**
-   * Create the BiasLayer object using the specified number of bias units.
+   * Create the NeuronLayer object using the specified number of neurons.
    *
    * @param layerSize The number of neurons.
    */
-  BiasLayer(const size_t layerSize) :
-      inputActivations(arma::ones<VecType>(layerSize)),
+  NeuronLayer(const size_t layerSize) :
+      inputActivations(arma::zeros<VecType>(layerSize)),
       delta(arma::zeros<VecType>(layerSize)),
       layerSize(layerSize)
   {
@@ -79,7 +87,7 @@ class BiasLayer
   }
 
   //! Get the input activations.
  const VecType& InputActivation() const { return inputActivations; }
  //! Modify the input activations.
   VecType& InputActivation() { return inputActivations; }
 
@@ -107,20 +115,41 @@ class BiasLayer
 
   //! Locally-stored number of neurons.
   size_t layerSize;
-}; // class BiasLayer
+}; // class NeuronLayer
+
+// Convenience typedefs.
+
+/**
+ * Standard input layer using the logistic activation function.
+ */
+template <
+    class ActivationFunction = LogisticFunction,
+    typename MatType = arma::mat,
+    typename VecType = arma::colvec
+>
+using InputLayer = NeuronLayer<ActivationFunction, MatType, VecType>;
+
+/**
+ * Standard hidden layer using the logistic activation function.
+ */
+template <
+    class ActivationFunction = LogisticFunction,
+    typename MatType = arma::mat,
+    typename VecType = arma::colvec
+>
+using HiddenLayer = NeuronLayer<ActivationFunction, MatType, VecType>;
+
+/**
+ * Layer of rectified linear units (relu) using the rectifier activation
+ * function.
+ */
+template <
+    class ActivationFunction = RectifierFunction,
+    typename MatType = arma::mat,
+    typename VecType = arma::colvec
+>
+using ReluLayer = NeuronLayer<ActivationFunction, MatType, VecType>;
 
-//! Layer traits for the bias layer.
-template<>
-class LayerTraits<BiasLayer<> >
-{
- public:
-  /**
-   * If true, then the layer is binary.
-   */
-  static const bool IsBinary = false;
-  static const bool IsOutputLayer = false;
-  static const bool IsBiasLayer = true;
-};
 
 }; // namespace ann
 }; // namespace mlpack
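
To make the new API concrete, here is a minimal usage sketch. It assumes
only the neuron_layer.hpp header added in this commit; the surrounding
network machinery (connections, forward and backward passes) is omitted,
and the variable names are illustrative.

    #include <mlpack/core.hpp>
    #include <mlpack/methods/ann/layer/neuron_layer.hpp>

    using namespace mlpack::ann;

    int main()
    {
      // A layer of 10 neurons using the default logistic activation.
      NeuronLayer<> hidden(10);

      // The convenience typedefs only change the default template
      // arguments; ReluLayer substitutes the rectifier activation.
      ReluLayer<> rectified(10);

      // Input activations start at zero and are exposed through the
      // InputActivation() accessors.
      hidden.InputActivation().fill(0.5);

      return 0;
    }

Since InputLayer, HiddenLayer, and ReluLayer are all aliases of
NeuronLayer, they differ only in their default ActivationFunction
argument.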



