[mlpack-git] master: Reset the delta before calling the unpooling method. (7fb3213)

gitdub at big.cc.gt.atl.ga.us gitdub at big.cc.gt.atl.ga.us
Thu Jun 4 04:47:18 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/2f479f388ee3d34e4a20535c3662b1921a4c6c06...7fb32130bd683cf03a853ea2bc6960e80d625955

>---------------------------------------------------------------

commit 7fb32130bd683cf03a853ea2bc6960e80d625955
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Wed Jun 3 22:31:45 2015 +0200

    Reset the delta before calling the unpooling method.


>---------------------------------------------------------------

7fb32130bd683cf03a853ea2bc6960e80d625955
 src/mlpack/methods/ann/connections/pooling_connection.hpp | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/mlpack/methods/ann/connections/pooling_connection.hpp b/src/mlpack/methods/ann/connections/pooling_connection.hpp
index f8ef8dc..8f86c90 100644
--- a/src/mlpack/methods/ann/connections/pooling_connection.hpp
+++ b/src/mlpack/methods/ann/connections/pooling_connection.hpp
@@ -42,13 +42,11 @@ class PoolingConnection
   /**
    * Create the PoolingConnection object using the specified input layer, output
    * layer, optimizer and pooling strategy.
-   * The factor and bias is stored in @weights.
    *
    * @param InputLayerType The input layer which is connected with the output
    * layer.
    * @param OutputLayerType The output layer which is connected with the input
    * layer.
-   * @param OptimizerType The optimizer used to update the weight matrix.
    * @param PoolingRule The strategy of pooling.
    */
   PoolingConnection(InputLayerType& inputLayer,
@@ -59,8 +57,8 @@ class PoolingConnection
       optimizer(0),
       weights(0),
       pooling(pooling),
-      delta(inputLayer.Delta().n_rows, inputLayer.Delta().n_cols,
-            inputLayer.Delta().n_slices)
+      delta(arma::zeros<DataType>(inputLayer.Delta().n_rows,
+          inputLayer.Delta().n_cols, inputLayer.Delta().n_slices))
   {
     // Nothing to do here.
   }
@@ -99,6 +97,7 @@ class PoolingConnection
   template<typename eT>
   void FeedBackward(const arma::Mat<eT>& error)
   {
+    delta.zeros();
     Unpooling(inputLayer.InputActivation(), error, inputLayer.Delta());
   }
 
@@ -111,6 +110,7 @@ class PoolingConnection
   template<typename eT>
   void FeedBackward(const arma::Cube<eT>& error)
   {
+    delta.zeros();
     for (size_t s = 0; s < error.n_slices; s++)
     {
       Unpooling(inputLayer.InputActivation().slice(s), error.slice(s),
@@ -201,7 +201,7 @@ class PoolingConnection
     {
       for (size_t i = 0; i < input.n_rows; i += rStep)
       {
-        const arma::Mat<eT>& inputArea = input(arma::span(i, i + rStep -1),
+        const arma::Mat<eT>& inputArea = input(arma::span(i, i + rStep - 1),
                                                arma::span(j, j + cStep - 1));
 
         pooling.Unpooling(inputArea, error(i / rStep, j / cStep),



More information about the mlpack-git mailing list