[mlpack-git] master: The connection type defines how to multiply the input activation and the delta. (b042186)
gitdub at big.cc.gt.atl.ga.us
Fri Feb 27 15:51:42 EST 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/594fd9f61d1280152c758559b4fc60bf0c827cca...45f682337b1daa4c82797f950e16a605fe4971bd
>---------------------------------------------------------------
commit b042186d53a8c77e9174ce72d0e6c94dde1fc2b4
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date: Wed Jan 21 14:38:04 2015 +0100
The connection type defines how to multiply the input activation and the delta.
>---------------------------------------------------------------
b042186d53a8c77e9174ce72d0e6c94dde1fc2b4
src/mlpack/methods/ann/connections/full_connection.hpp | 10 ++++++++++
src/mlpack/methods/ann/connections/fullself_connection.hpp | 10 ++++++++++
src/mlpack/methods/ann/connections/self_connection.hpp | 10 ++++++++++
3 files changed, 30 insertions(+)
diff --git a/src/mlpack/methods/ann/connections/full_connection.hpp b/src/mlpack/methods/ann/connections/full_connection.hpp
index da95755..dcccb9b 100644
--- a/src/mlpack/methods/ann/connections/full_connection.hpp
+++ b/src/mlpack/methods/ann/connections/full_connection.hpp
@@ -83,6 +83,16 @@ class FullConnection
delta = (weights.t() * error);
}
+ /*
+ * Calculate the gradient using the output delta and the input activation.
+ *
+ * @param gradient The calculated gradient.
+ */
+ void Gradient(MatType& gradient)
+ {
+ gradient = outputLayer.Delta() * inputLayer.InputActivation().t();
+ }
+
//! Get the weights.
MatType& Weights() const { return weights; }
//! Modify the weights.
diff --git a/src/mlpack/methods/ann/connections/fullself_connection.hpp b/src/mlpack/methods/ann/connections/fullself_connection.hpp
index 37b6dc3..470be4e 100644
--- a/src/mlpack/methods/ann/connections/fullself_connection.hpp
+++ b/src/mlpack/methods/ann/connections/fullself_connection.hpp
@@ -85,6 +85,16 @@ class FullselfConnection
delta = (weights.t() * error);
}
+ /*
+ * Calculate the gradient using the output delta and the input activation.
+ *
+ * @param gradient The calculated gradient.
+ */
+ void Gradient(MatType& gradient)
+ {
+ gradient = outputLayer.Delta() * inputLayer.InputActivation().t();
+ }
+
//! Get the weights.
MatType& Weights() const { return weights; }
//! Modify the weights.
diff --git a/src/mlpack/methods/ann/connections/self_connection.hpp b/src/mlpack/methods/ann/connections/self_connection.hpp
index 6401c42..fb3e226 100644
--- a/src/mlpack/methods/ann/connections/self_connection.hpp
+++ b/src/mlpack/methods/ann/connections/self_connection.hpp
@@ -84,6 +84,16 @@ class SelfConnection
delta = (weights.t() * error);
}
+ /*
+ * Calculate the gradient using the output delta and the input activation.
+ *
+ * @param gradient The calculated gradient.
+ */
+ void Gradient(MatType& gradient)
+ {
+ gradient = outputLayer.Delta() % inputLayer.InputActivation();
+ }
+
//! Get the weights.
const MatType& Weights() const { return weights; }
//! Modify the weights.
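For context, a minimal sketch (not part of the commit; sizes and variable names are illustrative) of how the two gradient forms in this patch differ, using plain Armadillo: the full connection computes an outer product of the output delta and the input activation, while the self connection, where input and output have the same size, uses an element-wise product.

// Sketch only: shows the two gradient computations with plain Armadillo,
// independent of the FullConnection/SelfConnection classes above.
#include <armadillo>

int main()
{
  // delta: error signal of the output layer (outputSize x 1).
  // activation: input activation of the connection (inputSize x 1).
  arma::mat delta(4, 1, arma::fill::randn);
  arma::mat activation(3, 1, arma::fill::randn);

  // Full connection: outer product, matching the weight matrix shape
  // (outputSize x inputSize).
  arma::mat fullGradient = delta * activation.t();

  // Self connection: input and output sizes match, so the gradient is
  // the element-wise (Schur) product of delta and activation.
  arma::mat selfDelta(3, 1, arma::fill::randn);
  arma::mat selfGradient = selfDelta % activation;

  fullGradient.print("full connection gradient:");
  selfGradient.print("self connection gradient:");
  return 0;
}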