[mlpack-git] master: The connection type defines how to multiply the input activation and the delta. (704718e)

gitdub at big.cc.gt.atl.ga.us
Thu Mar 5 22:13:47 EST 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/904762495c039e345beba14c1142fd719b3bd50e...f94823c800ad6f7266995c700b1b630d5ffdcf40

>---------------------------------------------------------------

commit 704718e2c0c339cda6f819e1dc5f2db1900afc51
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Wed Jan 21 14:38:04 2015 +0100

    The connection type defines how to multiply the input activation and the delta.


>---------------------------------------------------------------

704718e2c0c339cda6f819e1dc5f2db1900afc51
 src/mlpack/methods/ann/connections/full_connection.hpp     | 10 ++++++++++
 src/mlpack/methods/ann/connections/fullself_connection.hpp | 10 ++++++++++
 src/mlpack/methods/ann/connections/self_connection.hpp     | 10 ++++++++++
 3 files changed, 30 insertions(+)

diff --git a/src/mlpack/methods/ann/connections/full_connection.hpp b/src/mlpack/methods/ann/connections/full_connection.hpp
index da95755..dcccb9b 100644
--- a/src/mlpack/methods/ann/connections/full_connection.hpp
+++ b/src/mlpack/methods/ann/connections/full_connection.hpp
@@ -83,6 +83,16 @@ class FullConnection
     delta = (weights.t() * error);
   }
 
+  /**
+   * Calculate the gradient using the output delta and the input activation.
+   *
+   * @param gradient Matrix to store the calculated gradient into.
+   */
+  void Gradient(MatType& gradient)
+  {
+    gradient = outputLayer.Delta() * inputLayer.InputActivation().t();
+  }
+
   //! Get the weights.
   MatType& Weights() const { return weights; }
   //! Modify the weights.
diff --git a/src/mlpack/methods/ann/connections/fullself_connection.hpp b/src/mlpack/methods/ann/connections/fullself_connection.hpp
index 37b6dc3..470be4e 100644
--- a/src/mlpack/methods/ann/connections/fullself_connection.hpp
+++ b/src/mlpack/methods/ann/connections/fullself_connection.hpp
@@ -85,6 +85,16 @@ class FullselfConnection
     delta = (weights.t() * error);
   }
 
+  /**
+   * Calculate the gradient using the output delta and the input activation.
+   *
+   * @param gradient Matrix to store the calculated gradient into.
+   */
+  void Gradient(MatType& gradient)
+  {
+    gradient = outputLayer.Delta() * inputLayer.InputActivation().t();
+  }
+
   //! Get the weights.
   MatType& Weights() const { return weights; }
   //! Modify the weights.
diff --git a/src/mlpack/methods/ann/connections/self_connection.hpp b/src/mlpack/methods/ann/connections/self_connection.hpp
index 6401c42..fb3e226 100644
--- a/src/mlpack/methods/ann/connections/self_connection.hpp
+++ b/src/mlpack/methods/ann/connections/self_connection.hpp
@@ -84,6 +84,16 @@ class SelfConnection
     delta = (weights.t() * error);
   }
 
+  /**
+   * Calculate the gradient as the element-wise product of the output
+   * delta and the input activation.
+   * @param gradient Matrix to store the calculated gradient into.
+   */
+  void Gradient(MatType& gradient)
+  {
+    gradient = outputLayer.Delta() % inputLayer.InputActivation();
+  }
+
   //! Get the weights.
   const MatType& Weights() const { return weights; }
   //! Modify the weights.

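For context on the change above: in Armadillo, operator* is the matrix product while operator% is the element-wise (Schur) product. That is why FullConnection and FullselfConnection form the gradient as an outer product spanning the whole weight matrix, while SelfConnection, where every unit connects only to itself, multiplies element-wise. A minimal standalone sketch of the two operations (not part of this commit; the sizes and variable names are illustrative):

    #include <armadillo>

    int main()
    {
      // Hypothetical sizes: 3 output units, 4 input units.
      arma::vec delta(3, arma::fill::randn);       // Output delta.
      arma::vec activation(4, arma::fill::randn);  // Input activation.

      // Full connection: the outer product yields one gradient entry per
      // weight, matching a 3 x 4 weight matrix.
      arma::mat fullGradient = delta * activation.t();  // 3 x 4

      // Self connection: every unit connects only to itself, so delta and
      // activation share one size and the gradient is element-wise (%).
      arma::vec selfDelta(3, arma::fill::randn);
      arma::vec selfActivation(3, arma::fill::randn);
      arma::vec selfGradient = selfDelta % selfActivation;  // 3 x 1

      fullGradient.print("full connection gradient:");
      selfGradient.print("self connection gradient:");
      return 0;
    }

Built against Armadillo (e.g. g++ sketch.cpp -o sketch -larmadillo), this prints a 3 x 4 gradient for the full connection and a 3 x 1 gradient for the self connection.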

