[mlpack-git] master: Initialize the gradient storage with zero. (069d995)

gitdub at big.cc.gt.atl.ga.us
Thu Mar 5 22:11:28 EST 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/904762495c039e345beba14c1142fd719b3bd50e...f94823c800ad6f7266995c700b1b630d5ffdcf40

>---------------------------------------------------------------

commit 069d9951856a8774a04c9e612a561840a24f9ef6
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Fri Jan 9 13:23:45 2015 +0100

    Initialize the gradient storage with zero.


>---------------------------------------------------------------

069d9951856a8774a04c9e612a561840a24f9ef6
 src/mlpack/methods/ann/ffnn.hpp | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/mlpack/methods/ann/ffnn.hpp b/src/mlpack/methods/ann/ffnn.hpp
index 6bb30c3..907782e 100644
--- a/src/mlpack/methods/ann/ffnn.hpp
+++ b/src/mlpack/methods/ann/ffnn.hpp
@@ -67,6 +67,7 @@ class FFNN
       ResetActivations(network);
       std::get<0>(
             std::get<0>(network)).InputLayer().InputActivation() = input;
+
       FeedForward(network, target, error);
     }
 
@@ -406,7 +407,7 @@ class FFNN
     Layer(std::tuple<Tp...>& t)
     {
       gradients.push_back(new MatType(std::get<I>(t).Weights().n_rows,
-          std::get<I>(t).Weights().n_cols));
+          std::get<I>(t).Weights().n_cols, arma::fill::zeros));
 
       Layer<I + 1, Tp...>(t);
     }
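For reference, the motivation behind the one-line change above: an
arma::mat constructed from dimensions alone leaves its elements
uninitialized, so accumulating gradients into it would start from
garbage values; passing arma::fill::zeros guarantees a zero starting
point. A minimal standalone sketch of the difference (the 3x3 size is
an arbitrary placeholder for a layer's weight dimensions, not taken
from the mlpack code):

    #include <armadillo>

    int main()
    {
      // Dimensions-only construction: element values are unspecified,
      // so any in-place accumulation starts from garbage.
      arma::mat uninitializedGradient(3, 3);

      // With arma::fill::zeros every element starts at 0, as the
      // patched Layer() constructor now ensures for each stored
      // gradient matrix.
      arma::mat gradient(3, 3, arma::fill::zeros);

      // Accumulation such as gradient += update is now well-defined
      // from the very first update onward.
      gradient.print("gradient:");

      return 0;
    }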


