[mlpack-git] master: Add identity connection. (b9ac6ef)

gitdub at big.cc.gt.atl.ga.us gitdub at big.cc.gt.atl.ga.us
Sat Jun 27 08:47:27 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/9f0d3b30c469a3a535d3672b70c88aacbb2753c1...cf08dc96724f70468c8e31ced1761ed000b55b18

>---------------------------------------------------------------

commit b9ac6efcec28a0c3c6c8ae867ff272b52092f722
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Thu Jun 25 16:18:34 2015 +0200

    Add identity connection.


>---------------------------------------------------------------

b9ac6efcec28a0c3c6c8ae867ff272b52092f722
 .../ann/connections/identity_connection.hpp        | 169 +++++++++++++++++++++
 1 file changed, 169 insertions(+)

diff --git a/src/mlpack/methods/ann/connections/identity_connection.hpp b/src/mlpack/methods/ann/connections/identity_connection.hpp
new file mode 100644
index 0000000..39e70a0
--- /dev/null
+++ b/src/mlpack/methods/ann/connections/identity_connection.hpp
@@ -0,0 +1,169 @@
+/**
+ * @file identity_connection.hpp
+ * @author Marcus Edel
+ *
+ * Implementation of the identity connection between the input- and output
+ * layer.
+ */
+#ifndef __MLPACK_METHODS_ANN_CONNECTIONS_IDENTITY_CONNECTION_HPP
+#define __MLPACK_METHODS_ANN_CONNECTIONS_IDENTITY_CONNECTION_HPP
+
+#include <mlpack/core.hpp>
+#include <mlpack/methods/ann/optimizer/rmsprop.hpp>
+#include <mlpack/methods/ann/connections/connection_traits.hpp>
+
+namespace mlpack{
+namespace ann /** Artificial Neural Network. */ {
+
+/**
+ * Implementation of the identity connection class. The identity connection
+ * connects the i'th element from the input layer with the i'th element of the
+ * output layer.
+ *
+ * @tparam InputLayerType Type of the connected input layer.
+ * @tparam OutputLayerType Type of the connected output layer.
+ * @tparam OptimizerType Type of the optimizer used to update the weights.
+ * @tparam DataType Type of data (arma::mat, arma::sp_mat or arma::cube).
+ */
+template<
+  typename InputLayerType,
+  typename OutputLayerType,
+  template<typename, typename> class OptimizerType = mlpack::ann::RMSPROP,
+  typename DataType = arma::cube
+>
+class IdentityConnection
+{
+ public:
+  /**
+   * Create the IdentityConnection object using the specified input layer and
+   * output layer. The delta is zero-initialized to the input layer's shape.
+   *
+   * @param inputLayer The input layer which is connected with the output
+   * layer.
+   * @param outputLayer The output layer which is connected with the input
+   * layer.
+   */
+  IdentityConnection(InputLayerType& inputLayer,
+                    OutputLayerType& outputLayer) :
+      inputLayer(inputLayer),
+      outputLayer(outputLayer),
+      optimizer(0),
+      weights(0),
+      delta(arma::zeros<DataType>(inputLayer.Delta().n_rows,
+          inputLayer.Delta().n_cols, inputLayer.Delta().n_slices))
+  {
+    // Nothing to do here.
+  }
+
+  /**
+   * Ordinary feed forward pass of a neural network; the input is accumulated
+   * into the output layer's activation unchanged (identity mapping).
+   * @param input Input data used for the forward pass.
+   */
+  template<typename InputType>
+  void FeedForward(const InputType& input)
+  {
+    outputLayer.InputActivation() += input;
+  }
+
+  /**
+   * Ordinary feed backward pass of a neural network; the backpropagated
+   * error is stored as the delta unchanged.
+   * @param error The backpropagated error.
+   */
+  template<typename ErrorType>
+  void FeedBackward(const ErrorType& error)
+  {
+    delta = error;
+  }
+
+  /**
+   * The identity connection holds no weights, so there is no gradient to
+   * calculate; this function is a no-op kept for interface compatibility.
+   * @param gradient Unused.
+   */
+  template<typename GradientType>
+  void Gradient(GradientType& /* unused */)
+  {
+    // Nothing to do here.
+  }
+
+  //! Get the weights. NOTE(review): weights is null here; dereference is UB.
+  DataType& Weights() const { return *weights; }
+  //! Modify the weights. NOTE(review): weights is never assigned in this class.
+  DataType& Weights() { return *weights; }
+
+  //! Get the input layer.
+  InputLayerType& InputLayer() const { return inputLayer; }
+  //! Modify the input layer.
+  InputLayerType& InputLayer() { return inputLayer; }
+
+  //! Get the output layer.
+  OutputLayerType& OutputLayer() const { return outputLayer; }
+  //! Modify the output layer.
+  OutputLayerType& OutputLayer() { return outputLayer; }
+
+  //! Get the optimizer. NOTE(review): optimizer is null; dereference is UB.
+  OptimizerType<IdentityConnection<InputLayerType,
+                                  OutputLayerType,
+                                  OptimizerType,
+                                  DataType>, DataType>& Optimzer() const
+  {
+    return *optimizer;
+  }
+  //! Modify the optimizer.
+  OptimizerType<IdentityConnection<InputLayerType,
+                                  OutputLayerType,
+                                  OptimizerType,
+                                  DataType>, DataType>& Optimzer()
+  {
+    return *optimizer;
+  }
+
+  //! Get the delta. NOTE(review): non-const ref from a const member function
+  DataType& Delta() const { return delta; }
+  //! Modify the passed error in backward propagation.
+  DataType& Delta() { return delta; }
+
+ private:
+  //! Locally-stored reference to the input layer.
+  InputLayerType& inputLayer;
+
+  //! Locally-stored reference to the output layer.
+  OutputLayerType& outputLayer;
+
+  //! Locally-stored optimizer pointer (initialized to zero, never assigned).
+  OptimizerType<IdentityConnection<InputLayerType,
+                                  OutputLayerType,
+                                  OptimizerType,
+                                  DataType>, DataType>* optimizer;
+
+  //! Locally-stored weight pointer (initialized to zero, never assigned).
+  DataType* weights;
+
+  //! Locally-stored passed error in backward propagation.
+  DataType delta;
+}; // IdentityConnection class.
+
+//! Connection traits for the identity connection (IsIdentityConnection set).
+template<
+    typename InputLayerType,
+    typename OutputLayerType,
+    template<typename, typename> class OptimizerType,
+    typename DataType
+>
+class ConnectionTraits<
+    IdentityConnection<InputLayerType, OutputLayerType,OptimizerType,
+    DataType> >
+{
+ public:
+  static const bool IsSelfConnection = false;
+  static const bool IsFullselfConnection = false;
+  static const bool IsPoolingConnection = false;
+  static const bool IsIdentityConnection = true;
+};
+
+}; // namespace ann
+}; // namespace mlpack
+
+#endif
\ No newline at end of file



More information about the mlpack-git mailing list