[mlpack-git] master: Refactor all activation functions and layers that use the transform() function. (3375caa)

gitdub at big.cc.gt.atl.ga.us
Sat Aug 29 10:30:30 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/f2758b220d426f52f0820426c89bceb1b1022532...3375caa34a38ccc6568970acde0b25946e60414f

>---------------------------------------------------------------

commit 3375caa34a38ccc6568970acde0b25946e60414f
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Sat Aug 29 16:30:23 2015 +0200

    Refactor all activation functions and layers that use the transform() function.


>---------------------------------------------------------------

3375caa34a38ccc6568970acde0b25946e60414f
 .../methods/ann/activation_functions/rectifier_function.hpp      | 4 +++-
 .../methods/ann/activation_functions/softsign_function.hpp       | 4 +++-
 src/mlpack/methods/ann/layer/dropout_layer.hpp                   | 9 ++++++++-
 3 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/src/mlpack/methods/ann/activation_functions/rectifier_function.hpp b/src/mlpack/methods/ann/activation_functions/rectifier_function.hpp
index 593b4ec..79db16b 100644
--- a/src/mlpack/methods/ann/activation_functions/rectifier_function.hpp
+++ b/src/mlpack/methods/ann/activation_functions/rectifier_function.hpp
@@ -98,7 +98,9 @@ class RectifierFunction
   static void deriv(const InputType& y, OutputType& x)
   {
     x = y;
-    x.transform( [](double y) { return deriv(y); } );
+
+    for (size_t i = 0; i < y.n_elem; i++)
+      x(i) = deriv(y(i));
   }
 }; // class RectifierFunction
 
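For context, the loop above is a drop-in replacement for Armadillo's transform(), presumably to support compilers or Armadillo versions without C++11 lambda support. A minimal standalone sketch of the same pattern (the scalar relu_deriv below is the usual ReLU derivative, written out for illustration; it is not mlpack's overload):

    #include <armadillo>

    // Scalar ReLU derivative: 1 for positive activations, 0 otherwise.
    inline double relu_deriv(const double y) { return y > 0.0 ? 1.0 : 0.0; }

    int main()
    {
      arma::mat y = arma::randn<arma::mat>(3, 3);
      arma::mat x = y;

      // Element-wise indexed loop in place of
      // x.transform( [](double y) { return relu_deriv(y); } );
      for (size_t i = 0; i < y.n_elem; i++)
        x(i) = relu_deriv(y(i));

      x.print("relu'(y):");
      return 0;
    }
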
diff --git a/src/mlpack/methods/ann/activation_functions/softsign_function.hpp b/src/mlpack/methods/ann/activation_functions/softsign_function.hpp
index b535449..e4dadbf 100644
--- a/src/mlpack/methods/ann/activation_functions/softsign_function.hpp
+++ b/src/mlpack/methods/ann/activation_functions/softsign_function.hpp
@@ -117,7 +117,9 @@ class SoftsignFunction
   static void inv(const InputVecType& y, OutputVecType& x)
   {
     x = y;
-    x.transform( [](double y) { return inv(y); } );
+
+    for (size_t i = 0; i < y.n_elem; i++)
+      x(i) = inv(y(i));
   }
 }; // class SoftsignFunction
 
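The softsign change follows the identical pattern. A standalone sketch, assuming the standard softsign inverse y / (1 - |y|) for |y| < 1 (the scalar softsign_inv here is illustrative, not mlpack's overload):

    #include <armadillo>
    #include <cmath>

    // Inverse of the softsign f(x) = x / (1 + |x|), defined for |y| < 1.
    inline double softsign_inv(const double y)
    {
      return y / (1.0 - std::abs(y));
    }

    int main()
    {
      arma::vec y = { -0.5, 0.0, 0.25 };
      arma::vec x = y;

      // Same indexed loop replacing the transform() call.
      for (size_t i = 0; i < y.n_elem; i++)
        x(i) = softsign_inv(y(i));

      x.print("softsign inverse of y:");
      return 0;
    }
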
diff --git a/src/mlpack/methods/ann/layer/dropout_layer.hpp b/src/mlpack/methods/ann/layer/dropout_layer.hpp
index 5b16436..cafec13 100644
--- a/src/mlpack/methods/ann/layer/dropout_layer.hpp
+++ b/src/mlpack/methods/ann/layer/dropout_layer.hpp
@@ -87,7 +87,14 @@ class DropoutLayer
       // ratio.
       scale = 1.0 / (1.0 - ratio);
       mask = arma::randu<arma::Mat<eT> >(input.n_rows, input.n_cols);
-      mask.transform( [&](double val) { return val > ratio; } );
+
+      arma::mat::iterator a = mask.begin();
+      arma::mat::iterator b = mask.end();
+      for(arma::mat::iterator i = a; i != b; ++i)
+      {
+        (*i) = (*i) > ratio;
+      }
+
       output = input % mask * scale;
     }
   }
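The dropout layer binarizes its random mask with an iterator loop rather than an indexed one. A minimal sketch of the same masking logic outside the layer (ratio and the matrix size are illustrative; note that for a generic arma::Mat<eT> the iterator type would be typename arma::Mat<eT>::iterator rather than arma::mat::iterator):

    #include <armadillo>

    int main()
    {
      const double ratio = 0.3;                  // dropout probability
      const double scale = 1.0 / (1.0 - ratio);  // rescales surviving units

      arma::mat input = arma::randn<arma::mat>(4, 4);
      arma::mat mask = arma::randu<arma::mat>(input.n_rows, input.n_cols);

      // Binarize the uniform noise without transform(): keep an element
      // when its random value exceeds the dropout ratio.
      for (arma::mat::iterator it = mask.begin(); it != mask.end(); ++it)
        *it = (*it > ratio);

      arma::mat output = input % mask * scale;   // mask, then rescale
      output.print("output after dropout:");
      return 0;
    }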


