[mlpack-svn] master: Add implementation of the full connection class. (902bec2)
gitdub at big.cc.gt.atl.ga.us
Wed Dec 31 15:59:08 EST 2014
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/c935252ea3134025d7d0df05afa4a1501dad4d59...8c13d5c6d16fadd1fe4dfb2230adfaa0268e95dd
>---------------------------------------------------------------
commit 902bec28b2b4f9949cfa9cd48efdcfc760e61ee0
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date: Wed Dec 31 21:58:19 2014 +0100
Add implementation of the full connection class.
>---------------------------------------------------------------
902bec28b2b4f9949cfa9cd48efdcfc760e61ee0
.../methods/ann/connections/full_connection.hpp | 126 ++++++++++++++++++
.../ann/connections/fullself_connection.hpp | 147 +++++++++++++++++++++
2 files changed, 273 insertions(+)
diff --git a/src/mlpack/methods/ann/connections/full_connection.hpp b/src/mlpack/methods/ann/connections/full_connection.hpp
new file mode 100644
index 0000000..82efd49
--- /dev/null
+++ b/src/mlpack/methods/ann/connections/full_connection.hpp
@@ -0,0 +1,126 @@
+/**
+ * @file full_connection.hpp
+ * @author Marcus Edel
+ *
+ * Implementation of the full connection class.
+ */
+#ifndef __MLPACK_METHODS_ANN_CONNECTIONS_FULL_CONNECTION_HPP
+#define __MLPACK_METHODS_ANN_CONNECTIONS_FULL_CONNECTION_HPP
+
+#include <mlpack/core.hpp>
+#include <mlpack/methods/ann/init_rules/nguyen_widrow_init.hpp>
+
+namespace mlpack {
+namespace ann /** Artificial Neural Network. */ {
+
+/**
+ * Implementation of the full connection class. The full connection connects
+ * every neuron of the input layer with every neuron of the output layer by
+ * multiplying the input with a weight matrix.
+ *
+ * @tparam InputLayerType Type of the connected input layer.
+ * @tparam OutputLayerType Type of the connected output layer.
+ * @tparam OptimizerType Type of the optimizer used to update the weights.
+ * @tparam WeightInitRule Rule used to initialize the weight matrix.
+ * @tparam MatType Type of data (arma::mat or arma::sp_mat).
+ * @tparam VecType Type of data (arma::colvec, arma::mat or arma::sp_mat).
+ */
+template<
+ typename InputLayerType,
+ typename OutputLayerType,
+ typename OptimizerType,
+ class WeightInitRule = NguyenWidrowInitialization<>,
+ typename MatType = arma::mat,
+ typename VecType = arma::colvec
+>
+class FullConnection
+{
+ public:
+ /**
+   * Create the FullConnection object using the specified input layer, output
+   * layer, optimizer and weight initialization rule.
+   *
+   * @param inputLayer The input layer to be connected.
+   * @param outputLayer The output layer to be connected.
+   * @param optimizer The optimizer used to update the weights.
+   * @param weightInitRule The rule used to initialize the weight matrix.
+ */
+ FullConnection(InputLayerType& inputLayer,
+ OutputLayerType& outputLayer,
+ OptimizerType& optimizer,
+ WeightInitRule weightInitRule = WeightInitRule()) :
+ inputLayer(inputLayer), outputLayer(outputLayer), optimizer(optimizer)
+ {
+ weightInitRule.Initialize(weights, outputLayer.InputSize(),
+ inputLayer.OutputSize());
+ }
+
+ /**
+ * Ordinary feed forward pass of a neural network, evaluating the function
+ * f(x) by propagating the activity forward through f.
+ *
+ * @param input Input data used for evaluating the specified activity function.
+ */
+ void FeedForward(const VecType& input)
+ {
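+    // Note the accumulation (+=): several connections may feed the same
+    // output layer, so each one adds its contribution to the layer's input
+    // activation instead of overwriting it.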
+ outputLayer.InputActivation() += (weights * input);
+ }
+
+ /**
+ * Ordinary feed backward pass of a neural network, calculating the function
+   * f(x) by propagating x backwards through f, using the results from the
+   * feed forward pass.
+ *
+ * @param error The backpropagated error.
+ */
+ void FeedBackward(const VecType& error)
+ {
+ // Calculating the delta using the partial derivative of the error with
+ // respect to a weight.
+ delta = (weights.t() * error);
+ }
+
+ //! Get the weights.
+  const MatType& Weights() const { return weights; }
+ //! Modify the weights.
+ MatType& Weights() { return weights; }
+
+ //! Get the input layer.
+ InputLayerType& InputLayer() const { return inputLayer; }
+ //! Modify the input layer.
+ InputLayerType& InputLayer() { return inputLayer; }
+
+ //! Get the output layer.
+ OutputLayerType& OutputLayer() const { return outputLayer; }
+ //! Modify the output layer.
+ OutputLayerType& OutputLayer() { return outputLayer; }
+
+  //! Get the optimizer.
+  OptimizerType& Optimizer() const { return optimizer; }
+  //! Modify the optimizer.
+  OptimizerType& Optimizer() { return optimizer; }
+
+  //! Get the delta.
+  const VecType& Delta() const { return delta; }
+  //! Modify the delta.
+ VecType& Delta() { return delta; }
+
+ private:
+ //! Locally-stored weight object.
+ MatType weights;
+
+ //! Locally-stored connected input layer object.
+ InputLayerType& inputLayer;
+
+ //! Locally-stored connected output layer object.
+ OutputLayerType& outputLayer;
+
+  //! Locally-stored optimizer object.
+ OptimizerType& optimizer;
+
+  //! Locally-stored delta object that holds the calculated delta.
+ VecType delta;
+}; // class FullConnection
+
+}; // namespace ann
+}; // namespace mlpack
+
+#endif
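
For orientation (not part of the commit itself), here is a minimal usage
sketch of the new class. DummyLayer and DummyOptimizer are hypothetical
stand-ins that provide only the interface FullConnection actually touches:

// Hypothetical sketch; DummyLayer and DummyOptimizer are placeholders,
// not types introduced by this commit.
#include <mlpack/core.hpp>
#include <mlpack/methods/ann/connections/full_connection.hpp>

struct DummyLayer
{
  DummyLayer(const size_t size) :
      size(size), activation(size, arma::fill::zeros) { }

  size_t InputSize() const { return size; }
  size_t OutputSize() const { return size; }
  arma::colvec& InputActivation() { return activation; }

  size_t size;
  arma::colvec activation;
};

struct DummyOptimizer { };

int main()
{
  DummyLayer inputLayer(10), outputLayer(5);
  DummyOptimizer optimizer;

  // The constructor initializes a 5x10 weight matrix through the default
  // NguyenWidrowInitialization rule.
  mlpack::ann::FullConnection<DummyLayer, DummyLayer, DummyOptimizer>
      connection(inputLayer, outputLayer, optimizer);

  // Forward pass: outputLayer.InputActivation() += weights * input.
  connection.FeedForward(arma::randu<arma::colvec>(10));

  // Backward pass: connection.Delta() now holds weights.t() * error.
  connection.FeedBackward(arma::randu<arma::colvec>(5));
}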
diff --git a/src/mlpack/methods/ann/connections/fullself_connection.hpp b/src/mlpack/methods/ann/connections/fullself_connection.hpp
new file mode 100644
index 0000000..5d7fc81
--- /dev/null
+++ b/src/mlpack/methods/ann/connections/fullself_connection.hpp
@@ -0,0 +1,147 @@
+/**
+ * @file fullself_connection.hpp
+ * @author Marcus Edel
+ *
+ * Implementation of the full self connection class. This is mainly used as a
+ * recurrent connection.
+ */
+#ifndef __MLPACK_METHODS_ANN_CONNECTIONS_FULLSELF_CONNECTION_HPP
+#define __MLPACK_METHODS_ANN_CONNECTIONS_FULLSELF_CONNECTION_HPP
+
+#include <mlpack/core.hpp>
+#include <mlpack/methods/ann/init_rules/nguyen_widrow_init.hpp>
+
+namespace mlpack {
+namespace ann /** Artificial Neural Network. */ {
+
+/**
+ * Implementation of the full self connection class. The full connection
+ * connects every neuron of the input layer with every neuron of the output
+ * layer by multiplying the input with a weight matrix. This connection is used
+ * as a recurrent connection; the input is automatically updated at every time
+ * step.
+ *
+ * @tparam InputLayerType Type of the connected input layer.
+ * @tparam OutputLayerType Type of the connected output layer.
+ * @tparam OptimizerType Type of the optimizer used to update the weights.
+ * @tparam WeightInitRule Rule used to initialize the weight matrix.
+ * @tparam MatType Type of data (arma::mat or arma::sp_mat).
+ * @tparam VecType Type of data (arma::colvec, arma::mat or arma::sp_mat).
+ */
+template<
+ typename InputLayerType,
+ typename OutputLayerType,
+ typename OptimizerType,
+ class WeightInitRule = NguyenWidrowInitialization<>,
+ typename MatType = arma::mat,
+ typename VecType = arma::colvec
+>
+class FullselfConnection
+{
+ public:
+ /**
+   * Create the FullselfConnection object using the specified input layer,
+   * output layer, optimizer and weight initialization rule.
+   *
+   * @param inputLayer The input layer to be connected.
+   * @param outputLayer The output layer to be connected.
+   * @param optimizer The optimizer used to update the weights.
+   * @param weightInitRule The rule used to initialize the weight matrix.
+ */
+ FullselfConnection(InputLayerType& inputLayer,
+ OutputLayerType& outputLayer,
+ OptimizerType& optimizer,
+ WeightInitRule weightInitRule = WeightInitRule()) :
+ inputLayer(inputLayer), outputLayer(outputLayer), optimizer(optimizer)
+ {
+ weightInitRule.Initialize(weights, outputLayer.InputSize(),
+ inputLayer.OutputSize());
+ }
+
+ /**
+ * Ordinary feed forward pass of a neural network, evaluating the function
+ * f(x) by propagating the activity forward through f.
+ *
+ * @param input Input data used for evaluating the specified activity function.
+ */
+ void FeedForward(const VecType& input)
+ {
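+    // For the recurrent case the input vector is the activation from the
+    // previous time step; as above, the contribution is accumulated (+=)
+    // into the output layer's input activation.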
+ outputLayer.InputActivation() += (weights * input);
+ }
+
+ /**
+ * Ordinary feed backward pass of a neural network, calculating the function
+   * f(x) by propagating x backwards through f, using the results from the
+   * feed forward pass.
+ *
+ * @param error The backpropagated error.
+ */
+ void FeedBackward(const VecType& error)
+ {
+ // Calculating the delta using the partial derivative of the error with
+ // respect to a weight.
+ delta = (weights.t() * error);
+ }
+
+ //! Get the weights.
+  const MatType& Weights() const { return weights; }
+ //! Modify the weights.
+ MatType& Weights() { return weights; }
+
+ //! Get the input layer.
+ InputLayerType& InputLayer() const { return inputLayer; }
+ //! Modify the input layer.
+ InputLayerType& InputLayer() { return inputLayer; }
+
+ //! Get the output layer.
+ OutputLayerType& OutputLayer() const { return outputLayer; }
+ //! Modify the output layer.
+ OutputLayerType& OutputLayer() { return outputLayer; }
+
+  //! Get the optimizer.
+  OptimizerType& Optimizer() const { return optimizer; }
+  //! Modify the optimizer.
+  OptimizerType& Optimizer() { return optimizer; }
+
+  //! Get the delta.
+  const VecType& Delta() const { return delta; }
+  //! Modify the delta.
+ VecType& Delta() { return delta; }
+
+ private:
+ //! Locally-stored weight object.
+ MatType weights;
+
+ //! Locally-stored connected input layer object.
+ InputLayerType& inputLayer;
+
+ //! Locally-stored connected output layer object.
+ OutputLayerType& outputLayer;
+
+  //! Locally-stored optimizer object.
+ OptimizerType& optimizer;
+
+  //! Locally-stored delta object that holds the calculated delta.
+ VecType delta;
+}; // class FullselfConnection
+
+
+//! Connection traits for the full self connection.
+template<
+ typename InputLayerType,
+ typename OutputLayerType,
+ typename OptimizerType,
+ class WeightInitRule,
+ typename MatType,
+ typename VecType
+>
+class ConnectionTraits<
+ FullselfConnection<InputLayerType, OutputLayerType, OptimizerType,
+ WeightInitRule, MatType, VecType> >
+{
+ public:
+ static const bool IsSelfConnection = false;
+ static const bool IsFullselfConnection = true;
+};
+
+}; // namespace ann
+}; // namespace mlpack
+
+#endif
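
The ConnectionTraits specialization above lets calling code branch on the
connection kind at compile time. A small illustrative sketch, assuming the
primary ConnectionTraits template (defined elsewhere in the ann module)
defaults both flags to false:

// Illustrative only; ReportConnectionKind is a hypothetical helper, not
// part of this commit.
#include <iostream>

template<typename ConnectionType>
void ReportConnectionKind()
{
  if (mlpack::ann::ConnectionTraits<ConnectionType>::IsFullselfConnection)
    std::cout << "full self (recurrent) connection" << std::endl;
  else
    std::cout << "ordinary connection" << std::endl;
}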