[mlpack-git] master: Add 3rd-order tensor support (Dropout layer). (6c4bcf6)

gitdub at big.cc.gt.atl.ga.us
Thu Sep 3 08:35:41 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/91ae1062772a0f2bbca9a072769629c2d775ae64...42d61dfdbc9b0cbce59398e67ea58544b0fa1919

>---------------------------------------------------------------

commit 6c4bcf63d864d7a040a984dfebe8079311ae2020
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Thu Sep 3 14:25:29 2015 +0200

    Add 3rd-order tensor support (Dropout layer).


>---------------------------------------------------------------

6c4bcf63d864d7a040a984dfebe8079311ae2020
 src/mlpack/methods/ann/layer/dropout_layer.hpp | 54 ++++++++++++++++++++------
 1 file changed, 43 insertions(+), 11 deletions(-)
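
The hunks below replace the explicit iterator loop over the mask with
mask.transform() and add an arma::Cube overload of Forward(), so the layer
also accepts 3rd-order tensors. As a minimal standalone sketch of the
inverted-dropout computation the new code performs, assuming only Armadillo
(the names here are illustrative, not mlpack API):

    #include <armadillo>

    int main()
    {
      const double ratio = 0.5;                 // dropout probability
      const double scale = 1.0 / (1.0 - ratio); // inverted-dropout rescale

      arma::cube input = arma::randu<arma::cube>(4, 4, 3);

      // Draw a uniform mask and threshold it: each element survives with
      // probability (1 - ratio).
      arma::cube mask = arma::randu<arma::cube>(input.n_rows, input.n_cols,
          input.n_slices);
      mask.transform([&](double val) { return (val > ratio); });

      // Zero out dropped elements and rescale the survivors, so the expected
      // activation matches the deterministic (test-time) pass.
      arma::cube output = input % mask * scale;

      output.print("dropout output:");
      return 0;
    }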

diff --git a/src/mlpack/methods/ann/layer/dropout_layer.hpp b/src/mlpack/methods/ann/layer/dropout_layer.hpp
index cafec13..9c7511b 100644
--- a/src/mlpack/methods/ann/layer/dropout_layer.hpp
+++ b/src/mlpack/methods/ann/layer/dropout_layer.hpp
@@ -87,14 +87,37 @@ class DropoutLayer
       // ratio.
       scale = 1.0 / (1.0 - ratio);
       mask = arma::randu<arma::Mat<eT> >(input.n_rows, input.n_cols);
+      mask.transform( [&](double val) { return (val > ratio); } );
+      output = input % mask * scale;
+    }
+  }
 
-      arma::mat::iterator a = mask.begin();
-      arma::mat::iterator b = mask.end();
-      for(arma::mat::iterator i = a; i != b; ++i)
-      {
-        (*i) = (*i) > ratio;
-      }
+  /**
+   * Ordinary feed forward pass of the dropout layer.
+   *
+   * @param input Input data used for evaluating the specified function.
+   * @param output Resulting output activation.
+   */
+  template<typename eT>
+  void Forward(const arma::Cube<eT>& input, arma::Cube<eT>& output)
+  {
+    // The dropout mask will not be multiplied in the deterministic mode
+    // (during testing).
+    if (deterministic)
+    {
+      output = input;
 
+      if (rescale)
+        output *= scale;
+    }
+    else
+    {
+      // Scale with input / (1 - ratio) and set values to zero with probability
+      // ratio.
+      scale = 1.0 / (1.0 - ratio);
+      mask = arma::randu<arma::Cube<eT> >(input.n_rows, input.n_cols,
+          input.n_slices);
+      mask.transform( [&](double val) { return (val > ratio); } );
       output = input % mask * scale;
     }
   }
@@ -106,10 +129,10 @@ class DropoutLayer
    * @param gy The backpropagated error.
    * @param g The calculated gradient.
    */
-  template<typename eT>
-  void Backward(const arma::Mat<eT>& /* unused */,
-                const arma::Mat<eT>& gy,
-                arma::Mat<eT>& g)
+  template<typename DataType>
+  void Backward(const DataType& /* unused */,
+                const DataType& gy,
+                DataType& g)
   {
     g = gy % mask * scale;
   }
@@ -171,7 +194,7 @@ class DropoutLayer
 }; // class DropoutLayer
 
 //! Layer traits for the bias layer.
-template<
+template <
   typename InputDataType,
   typename OutputDataType
 >
@@ -185,6 +208,15 @@ class LayerTraits<DropoutLayer<InputDataType, OutputDataType> >
   static const bool IsConnection = true;
 };
 
+/**
+ * Standard Dropout-Layer2D.
+ */
+template <
+    typename InputDataType = arma::cube,
+    typename OutputDataType = arma::cube
+>
+using DropoutLayer2D = DropoutLayer<InputDataType, OutputDataType>;
+
 }; // namespace ann
 }; // namespace mlpack
 
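For context, a hedged usage sketch of the new DropoutLayer2D alias. The cube
Forward() signature comes straight from the hunks above; the constructor
arguments (ratio, rescale) and the behavior of the deterministic flag are
assumptions inferred from the member names in the diff, not shown by this
commit:

    #include <mlpack/core.hpp>
    #include <mlpack/methods/ann/layer/dropout_layer.hpp>

    using namespace mlpack::ann;

    int main()
    {
      // Assumed constructor: dropout ratio 0.3, rescaling enabled at test
      // time. This signature is a guess based on the ratio/rescale members
      // that appear in the diff.
      DropoutLayer2D<> dropout(0.3, true);

      arma::cube input = arma::randu<arma::cube>(8, 8, 4);
      arma::cube output;

      // Training-time pass (assuming the deterministic flag is off):
      // elements are zeroed with probability 0.3 and the survivors are
      // scaled by 1 / (1 - 0.3).
      dropout.Forward(input, output);

      return 0;
    }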