[mlpack-git] master: Add implementation of the SoftmaxLayer class. (97fdd15)

gitdub at big.cc.gt.atl.ga.us
Thu Mar 5 22:10:05 EST 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/904762495c039e345beba14c1142fd719b3bd50e...f94823c800ad6f7266995c700b1b630d5ffdcf40

>---------------------------------------------------------------

commit 97fdd15b55a23274c453bdd09c14ce7e7d7733d4
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Sat Jan 3 14:07:53 2015 +0100

    Add implementation of the SoftmaxLayer class.


>---------------------------------------------------------------

97fdd15b55a23274c453bdd09c14ce7e7d7733d4
 .../layer/{bias_layer.hpp => softmax_layer.hpp}    | 55 +++++++---------------
 1 file changed, 16 insertions(+), 39 deletions(-)

diff --git a/src/mlpack/methods/ann/layer/bias_layer.hpp b/src/mlpack/methods/ann/layer/softmax_layer.hpp
similarity index 61%
copy from src/mlpack/methods/ann/layer/bias_layer.hpp
copy to src/mlpack/methods/ann/layer/softmax_layer.hpp
index dde95dd..63af854 100644
--- a/src/mlpack/methods/ann/layer/bias_layer.hpp
+++ b/src/mlpack/methods/ann/layer/softmax_layer.hpp
@@ -1,44 +1,36 @@
 /**
- * @file bias_layer.hpp
+ * @file softmax_layer.hpp
  * @author Marcus Edel
  *
- * Definition of the BiasLayer class, which implements a standard bias
+ * Definition of the SoftmaxLayer class, which implements a standard softmax
  * layer.
  */
-#ifndef __MLPACK_METHOS_ANN_LAYER_BIAS_LAYER_HPP
-#define __MLPACK_METHOS_ANN_LAYER_BIAS_LAYER_HPP
+#ifndef __MLPACK_METHODS_ANN_LAYER_SOFTMAX_LAYER_HPP
+#define __MLPACK_METHODS_ANN_LAYER_SOFTMAX_LAYER_HPP
 
 #include <mlpack/core.hpp>
-#include <mlpack/methods/ann/layer/layer_traits.hpp>
-#include <mlpack/methods/ann/activation_functions/identity_function.hpp>
 
 namespace mlpack {
 namespace ann /** Artificial Neural Network. */ {
 
 /**
- * An implementation of a standard bias layer with a default value of one.
+ * An implementation of a standard softmax layer.
  *
- * @tparam ActivationFunction Activation function used for the bias layer
- * (Default IdentityFunction).
  * @tparam MatType Type of data (arma::mat or arma::sp_mat).
  * @tparam VecType Type of data (arma::colvec, arma::mat or arma::sp_mat).
  */
-template <
-    class ActivationFunction = IdentityFunction,
-    typename MatType = arma::mat,
-    typename VecType = arma::colvec
->
-class BiasLayer
+template <typename MatType = arma::mat, typename VecType = arma::colvec>
+class SoftmaxLayer
 
 {
  public:
   /**
-   * Create the BiasLayer object using the specified number of bias units.
+   * Create the SoftmaxLayer object using the specified number of neurons.
    *
    * @param layerSize The number of neurons.
    */
-  BiasLayer(const size_t layerSize) :
-      inputActivations(arma::ones<VecType>(layerSize)),
+  SoftmaxLayer(const size_t layerSize) :
+      inputActivations(arma::zeros<VecType>(layerSize)),
       delta(arma::zeros<VecType>(layerSize)),
       layerSize(layerSize)
   {
@@ -55,7 +47,8 @@ class BiasLayer
    */
   void FeedForward(const VecType& inputActivation, VecType& outputActivation)
   {
-    ActivationFunction::fn(inputActivation, outputActivation);
+    outputActivation = arma::trunc_exp(inputActivation);
+    outputActivation /= arma::accu(outputActivation);
   }
 
   /**
@@ -68,18 +61,15 @@ class BiasLayer
   * @param delta The calculated delta using the partial derivative of the
   * error with respect to a weight.
    */
-  void FeedBackward(const VecType& inputActivation,
+  void FeedBackward(const VecType& /* unused */,
                     const VecType& error,
                     VecType& delta)
   {
-    VecType derivative;
-    ActivationFunction::deriv(inputActivation, derivative);
-
-    delta = error % derivative;
+    delta = error;
   }
 
   //! Get the input activations.
-  const VecType& InputActivation() const { return inputActivations; }
+  const VecType& InputActivation() const { return inputActivations; }
   //! Modify the input activations.
   VecType& InputActivation() { return inputActivations; }
 
@@ -107,20 +97,7 @@ class BiasLayer
 
   //! Locally-stored number of neurons.
   size_t layerSize;
-}; // class BiasLayer
-
-//! Layer traits for the bias layer.
-template<>
-class LayerTraits<BiasLayer<> >
-{
- public:
-  /**
-   * If true, then the layer is binary.
-   */
-  static const bool IsBinary = false;
-  static const bool IsOutputLayer = false;
-  static const bool IsBiasLayer = true;
-};
+}; // class SoftmaxLayer
 
 }; // namespace ann
 }; // namespace mlpack
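
For reference, the forward pass above implements the standard softmax,
softmax(x)_i = exp(x_i) / sum_j exp(x_j); the normalization divides by the
sum of the exponentiated activations, not the raw inputs. The standalone
sketch below (not part of the commit) illustrates the same computation with
plain Armadillo, using the common max-subtraction trick for numerical
stability instead of arma::trunc_exp, which the commit uses to avoid
overflow by truncating the exponential:

#include <armadillo>

// softmax(x)_i = exp(x_i) / sum_j exp(x_j). Subtracting the maximum first
// leaves the result unchanged but keeps exp() from overflowing for large
// activations.
arma::vec Softmax(const arma::vec& input)
{
  const arma::vec expd = arma::exp(input - input.max());
  return expd / arma::accu(expd);
}

int main()
{
  const arma::vec activation = {1.0, 2.0, 3.0};
  const arma::vec output = Softmax(activation);
  output.print("softmax output (entries sum to 1):");

  // Backward pass: with a cross-entropy loss on top of a softmax, the
  // gradient with respect to the pre-softmax input collapses to
  // (output - target). If the surrounding network delivers that combined
  // quantity as the error, the layer can forward it unchanged, which is
  // what FeedBackward() does with delta = error.
  const arma::vec target = {0.0, 0.0, 1.0};
  const arma::vec delta = output - target;
  delta.print("delta = output - target:");

  return 0;
}

This also suggests why FeedBackward() sets delta = error with no extra
Jacobian multiplication: presumably the error handed to this layer is
already the combined softmax-plus-loss gradient.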


