[mlpack-git] master: Adapt new design pattern. (f2dcb8f)

gitdub at big.cc.gt.atl.ga.us
Sun May 3 16:15:35 EDT 2015


Repository : https://github.com/mlpack/mlpack

On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/0f31abbdebcd34e2113d8acf47c1d0b087377921...174d2de995a3fe343cd92d158730f3afa03e622d

>---------------------------------------------------------------

commit f2dcb8fcbca211cac7d3444d21c82d63599aa1a8
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date:   Sun May 3 13:53:19 2015 +0200

    Adapt new design pattern.


>---------------------------------------------------------------

f2dcb8fcbca211cac7d3444d21c82d63599aa1a8
 .../methods/ann/connections/full_connection.hpp    | 91 +++++++++++++++++++---
 1 file changed, 79 insertions(+), 12 deletions(-)

diff --git a/src/mlpack/methods/ann/connections/full_connection.hpp b/src/mlpack/methods/ann/connections/full_connection.hpp
index 953665b..0257e2b 100644
--- a/src/mlpack/methods/ann/connections/full_connection.hpp
+++ b/src/mlpack/methods/ann/connections/full_connection.hpp
@@ -9,6 +9,7 @@
 
 #include <mlpack/core.hpp>
 #include <mlpack/methods/ann/init_rules/nguyen_widrow_init.hpp>
+#include <mlpack/methods/ann/optimizer/steepest_descent.hpp>
 
 namespace mlpack {
 namespace ann /** Artificial Neural Network. */ {
@@ -28,7 +29,7 @@ namespace ann /** Artificial Neural Network. */ {
 template<
     typename InputLayerType,
     typename OutputLayerType,
-    typename OptimizerType,
+    typename OptimizerType = SteepestDescent<>,
     class WeightInitRule = NguyenWidrowInitialization,
     typename MatType = arma::mat,
     typename VecType = arma::colvec
@@ -52,31 +53,74 @@ class FullConnection
                  OutputLayerType& outputLayer,
                  OptimizerType& optimizer,
                  WeightInitRule weightInitRule = WeightInitRule()) :
-      inputLayer(inputLayer), outputLayer(outputLayer), optimizer(optimizer)
+      inputLayer(inputLayer),
+      outputLayer(outputLayer),
+      optimizer(&optimizer),
+      ownsOptimizer(false)
   {
     weightInitRule.Initialize(weights, outputLayer.InputSize(),
-        inputLayer.OutputSize());
+        inputLayer.OutputSize() * inputLayer.LayerSlices());
+  }
+
+  FullConnection(InputLayerType& inputLayer,
+                 OutputLayerType& outputLayer,
+                 WeightInitRule weightInitRule = WeightInitRule()) :
+      inputLayer(inputLayer),
+      outputLayer(outputLayer),
+      optimizer(new OptimizerType()),
+      ownsOptimizer(true)
+  {
+    weightInitRule.Initialize(weights, outputLayer.InputSize(),
+        inputLayer.OutputSize() * inputLayer.LayerSlices());
+  }
+
+  /**
+   * Delete the full connection object and the optimizer if it is owned.
+   */
+  ~FullConnection()
+  {
+    if (ownsOptimizer)
+      delete optimizer;
   }
 
   /**
    * Ordinary feed forward pass of a neural network, evaluating the function
-   * f(x) by propagating the activity forward through f.
+   * f(x) by propagating the activity forward through f using a dense matrix as
+   * input.
    *
    * @param input Input data used for evaluating the specified activity function.
    */
-  void FeedForward(const VecType& input)
+  template<typename eT>
+  void FeedForward(const arma::Mat<eT>& input)
   {
     outputLayer.InputActivation() += (weights * input);
   }
 
   /**
+   * Ordinary feed forward pass of a neural network, evaluating the function
+   * f(x) by propagating the activity forward through f using a 3rd order tensor
+   * as input.
+   *
+   * @param input Input data used for evaluating the specified activity function.
+   */
+  template<typename eT>
+  void FeedForward(const arma::Cube<eT>& input)
+  {
+    // Vectorise the input (cube of n slices with a 1x1 dense matrix) and
+    // perform the feed forward pass.
+    outputLayer.InputActivation() += (weights *
+        arma::vec(input.memptr(), input.n_slices));
+  }
+
+  /**
    * Ordinary feed backward pass of a neural network, calculating the function
   * f(x) by propagating x backwards through f, using the results from the feed
    * forward pass.
    *
    * @param error The backpropagated error.
    */
-  void FeedBackward(const VecType& error)
+  template<typename eT>
+  void FeedBackward(const arma::Col<eT>& error)
   {
     // Calculating the delta using the partial derivative of the error with
     // respect to a weight.
@@ -84,15 +128,35 @@ class FullConnection
   }
 
   /*
-   * Calculate the gradient using the output delta and the input activation.
+   * Calculate the gradient using the output delta (dense matrix) and the input
+   * activation (dense matrix).
    *
    * @param gradient The calculated gradient.
    */
-  void Gradient(MatType& gradient)
+  template<typename eT>
+  void Gradient(arma::Mat<eT>& gradient)
   {
     gradient = outputLayer.Delta() * inputLayer.InputActivation().t();
   }
 
+  /*
+   * Calculate the gradient using the output delta (3rd order tensor) and the
+   * input activation (3rd order tensor).
+   *
+   * @param gradient The calculated gradient.
+   */
+  template<typename eT>
+  void Gradient(arma::Cube<eT>& gradient)
+  {
+    gradient = arma::Cube<eT>(weights.n_rows, weights.n_cols, 1);
+
+    // Vectorise the input (cube of n slices with a 1x1 dense matrix) and
+    // calculate the gradient.
+    gradient.slice(0) = outputLayer.Delta() *
+        arma::rowvec(inputLayer.InputActivation().memptr(),
+        inputLayer.InputActivation().n_elem);
+  }
+
   //! Get the weights.
   MatType& Weights() const { return weights; }
   //! Modify the weights.
@@ -109,9 +173,9 @@ class FullConnection
   OutputLayerType& OutputLayer() { return outputLayer; }
 
   //! Get the optimizer.
-  OptimizerType& Optimzer() const { return optimizer; }
+  OptimizerType& Optimzer() const { return *optimizer; }
   //! Modify the optimizer.
-  OptimizerType& Optimzer() { return optimizer; }
+  OptimizerType& Optimzer() { return *optimizer; }
 
   //! Get the delta.
   VecType& Delta() const { return delta; }
@@ -128,8 +192,11 @@ class FullConnection
   //! Locally-stored connected output layer object.
   OutputLayerType& outputLayer;
 
-  //! Locally-stored optimzer object.
-  OptimizerType& optimizer;
+  //! Locally-stored pointer to the optimizer object.
+  OptimizerType* optimizer;
+
+  //! Parameter that indicates whether the class owns an optimizer object.
+  bool ownsOptimizer;
 
   //! Locally-stored delta object that holds the calculated delta.
   VecType delta;

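For readers tracking the design change: the practical effect is that the
optimizer argument is now optional. A minimal sketch of the two construction
styles, assuming hypothetical layer objects inputLayer and outputLayer whose
types provide the InputSize()/OutputSize()/LayerSlices() interface used above
(the layer type names below are illustrative, not part of this commit):

    // Old pattern: the caller constructs the optimizer and keeps it alive;
    // the connection stores it without taking ownership.
    SteepestDescent<> optimizer;
    FullConnection<InputLayer, OutputLayer, SteepestDescent<> >
        con1(inputLayer, outputLayer, optimizer);

    // New pattern: OptimizerType defaults to SteepestDescent<>, and the new
    // constructor allocates its own optimizer (ownsOptimizer == true),
    // which the destructor deletes.
    FullConnection<InputLayer, OutputLayer> con2(inputLayer, outputLayer);

The ownsOptimizer flag keeps the two styles from colliding: an externally
supplied optimizer is never deleted, while an internally allocated one is
released in the destructor.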

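The new arma::Cube overloads rely on the convention stated in the comments
above: a cube input holds n slices that are each a 1x1 dense matrix, and since
cube memory is contiguous it can be copied straight into a vector. A small
standalone illustration in plain Armadillo (independent of mlpack):

    #include <armadillo>

    int main()
    {
      // Four activations stored as a 4-slice cube of 1x1 matrices.
      arma::cube input(1, 1, 4);
      input.randu();

      // The view the cube overloads take of the data: an n_slices-element
      // vector copied from the cube's contiguous memory.
      arma::vec flattened(input.memptr(), input.n_slices);

      flattened.print("flattened input:");
      return 0;
    }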

More information about the mlpack-git mailing list