[mlpack-git] master: Fix 'compiler is out of heap space' error when using MSVC 2015. (f58aa36)

gitdub at mlpack.org
Mon Mar 7 11:28:07 EST 2016


Repository : https://github.com/mlpack/mlpack
On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/cff6a986f21e6c9a4e8a70cd2efec978ad55dc7d...f58aa3662eae235f74ed8fc6ef1e3446b0f28445

>---------------------------------------------------------------

commit f58aa3662eae235f74ed8fc6ef1e3446b0f28445
Author: marcus <marcus.edel at fu-berlin.de>
Date:   Mon Mar 7 17:28:07 2016 +0100

    Fix 'compiler is out of heap space' error when using MSVC 2015.


>---------------------------------------------------------------

f58aa3662eae235f74ed8fc6ef1e3446b0f28445
 src/mlpack/methods/ann/rnn.hpp | 87 +++++++++++++++++++++++++++++++-----------
 1 file changed, 64 insertions(+), 23 deletions(-)

diff --git a/src/mlpack/methods/ann/rnn.hpp b/src/mlpack/methods/ann/rnn.hpp
index 2c6210c..9d1d21b 100644
--- a/src/mlpack/methods/ann/rnn.hpp
+++ b/src/mlpack/methods/ann/rnn.hpp
@@ -377,19 +377,28 @@ class RNN
   /**
    * Save the network layer activations.
    */
-  template<size_t I = 0, typename... Tp>
-  typename std::enable_if<I == sizeof...(Tp), void>::type
+  template<
+      size_t I = 0,
+      size_t Max = std::tuple_size<LayerTypes>::value - 1,
+      typename... Tp
+  >
+  typename std::enable_if<I == Max, void>::type
   SaveActivations(std::tuple<Tp...>& /* unused */)
   {
+    Save(I, std::get<I>(network), std::get<I>(network).InputParameter());
     LinkRecurrent(network);
   }
 
-  template<size_t I = 0, typename... Tp>
-  typename std::enable_if<I < sizeof...(Tp), void>::type
+  template<
+      size_t I = 0,
+      size_t Max = std::tuple_size<LayerTypes>::value - 1,
+      typename... Tp
+  >
+  typename std::enable_if<I < Max, void>::type
   SaveActivations(std::tuple<Tp...>& network)
   {
     Save(I, std::get<I>(network), std::get<I>(network).InputParameter());
-    SaveActivations<I + 1, Tp...>(network);
+    SaveActivations<I + 1, Max, Tp...>(network);
   }
 
   /**
@@ -427,19 +436,28 @@ class RNN
   /**
    * Load the network layer activations.
    */
-  template<size_t I = 0, typename DataType, typename... Tp>
-  typename std::enable_if<I == sizeof...(Tp), void>::type
+  template<
+      size_t I = 0,
+      size_t Max = std::tuple_size<LayerTypes>::value - 1,
+      typename DataType, typename... Tp
+  >
+  typename std::enable_if<I == Max, void>::type
   LoadActivations(DataType& input, std::tuple<Tp...>& network)
   {
+    Load(I, std::get<I>(network), std::get<I>(network).InputParameter());
     std::get<0>(network).InputParameter() = input;
   }
 
-  template<size_t I = 0, typename DataType, typename... Tp>
-  typename std::enable_if<I < sizeof...(Tp), void>::type
+  template<
+      size_t I = 0,
+      size_t Max = std::tuple_size<LayerTypes>::value - 1,
+      typename DataType, typename... Tp
+  >
+  typename std::enable_if<I < Max, void>::type
   LoadActivations(DataType& input, std::tuple<Tp...>& network)
   {
     Load(I, std::get<I>(network), std::get<I>(network).InputParameter());
-    LoadActivations<I + 1, DataType, Tp...>(input, network);
+    LoadActivations<I + 1, Max, DataType, Tp...>(input, network);
   }
 
   /**
@@ -494,12 +512,28 @@ class RNN
   /**
    * Link the calculated activation with the correct layer.
    */
-  template<size_t I = 1, typename... Tp>
-  typename std::enable_if<I == sizeof...(Tp), void>::type
-  LinkParameter(std::tuple<Tp ...>& /* unused */) { /* Nothing to do here */ }
+  template<
+      size_t I = 1,
+      size_t Max = std::tuple_size<LayerTypes>::value - 1,
+      typename... Tp
+  >
+  typename std::enable_if<I == Max, void>::type
+  LinkParameter(std::tuple<Tp ...>& /* unused */)
+  {
+    if (!LayerTraits<typename std::remove_reference<
+        decltype(std::get<I>(network))>::type>::IsBiasLayer)
+    {
+      std::get<I>(network).InputParameter() = std::get<I - 1>(
+          network).OutputParameter();
+    }
+  }
 
-  template<size_t I = 1, typename... Tp>
-  typename std::enable_if<I < sizeof...(Tp), void>::type
+  template<
+      size_t I = 1,
+      size_t Max = std::tuple_size<LayerTypes>::value - 1,
+      typename... Tp
+  >
+  typename std::enable_if<I < Max, void>::type
   LinkParameter(std::tuple<Tp...>& network)
   {
     if (!LayerTraits<typename std::remove_reference<
@@ -509,23 +543,31 @@ class RNN
           network).OutputParameter();
     }
 
-    LinkParameter<I + 1, Tp...>(network);
+    LinkParameter<I + 1, Max, Tp...>(network);
   }
 
   /**
    * Link the calculated activation with the correct recurrent layer.
    */
-  template<size_t I = 0, typename... Tp>
-  typename std::enable_if<I == (sizeof...(Tp) - 1), void>::type
+  template<
+      size_t I = 0,
+      size_t Max = std::tuple_size<LayerTypes>::value - 1,
+      typename... Tp
+  >
+  typename std::enable_if<I == Max, void>::type
   LinkRecurrent(std::tuple<Tp ...>& /* unused */) { /* Nothing to do here */ }
 
-  template<size_t I = 0, typename... Tp>
-  typename std::enable_if<I < (sizeof...(Tp) - 1), void>::type
+  template<
+      size_t I = 0,
+      size_t Max = std::tuple_size<LayerTypes>::value - 1,
+      typename... Tp
+  >
+  typename std::enable_if<I < Max, void>::type
   LinkRecurrent(std::tuple<Tp...>& network)
   {
     UpdateRecurrent(std::get<I>(network), std::get<I>(network).InputParameter(),
         std::get<I + 1>(network).OutputParameter());
-    LinkRecurrent<I + 1, Tp...>(network);
+    LinkRecurrent<I + 1, Max, Tp...>(network);
   }
 
   /**
@@ -571,8 +613,7 @@ class RNN
    * layer modules.
    */
   template<size_t I = 1, typename DataType, typename... Tp>
-  typename std::enable_if<I < (sizeof...(Tp) - 1), void>::type
-  Backward(DataType& error, std::tuple<Tp ...>& network)
+  void Backward(DataType& error, std::tuple<Tp ...>& network)
   {
     std::get<sizeof...(Tp) - I>(network).Backward(
         std::get<sizeof...(Tp) - I>(network).OutputParameter(), error,

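For readers skimming the diff: the change replaces the sizeof...(Tp)-based
termination conditions with an explicit recursion bound Max, supplied as a
defaulted template parameter computed from std::tuple_size<LayerTypes>::value - 1
and forwarded through every recursive call. The sketch below shows the same
bounded compile-time tuple-iteration pattern in isolation. It is a minimal,
hypothetical example under assumed names (LayerA/LayerB/LayerC, VisitLayers,
and Print are stand-ins invented for illustration, not mlpack APIs).

#include <cstddef>
#include <iostream>
#include <tuple>
#include <type_traits>

// Hypothetical layer types standing in for mlpack's LayerTypes tuple.
struct LayerA { void Print() const { std::cout << "LayerA\n"; } };
struct LayerB { void Print() const { std::cout << "LayerB\n"; } };
struct LayerC { void Print() const { std::cout << "LayerC\n"; } };

using LayerTypes = std::tuple<LayerA, LayerB, LayerC>;

// Base case: I has reached the explicit bound Max, so visit the last
// element and stop.  The bound is a defaulted template parameter instead
// of being recomputed from sizeof...(Tp) inside each enable_if condition.
template<
    std::size_t I = 0,
    std::size_t Max = std::tuple_size<LayerTypes>::value - 1,
    typename... Tp
>
typename std::enable_if<I == Max, void>::type
VisitLayers(std::tuple<Tp...>& network)
{
  std::get<I>(network).Print();
}

// Recursive case: visit element I, then recurse on I + 1, forwarding the
// same Max bound explicitly, as the patched SaveActivations,
// LoadActivations, LinkParameter, and LinkRecurrent overloads do.
template<
    std::size_t I = 0,
    std::size_t Max = std::tuple_size<LayerTypes>::value - 1,
    typename... Tp
>
typename std::enable_if<I < Max, void>::type
VisitLayers(std::tuple<Tp...>& network)
{
  std::get<I>(network).Print();
  VisitLayers<I + 1, Max, Tp...>(network);
}

int main()
{
  LayerTypes network;
  VisitLayers(network);  // Visits LayerA, LayerB, LayerC in order.
  return 0;
}

Per the commit message, expressing the stopping condition against a plain
Max constant (rather than against sizeof...(Tp) in every overload) is the
rewrite that avoids the 'compiler is out of heap space' error under
MSVC 2015 when these recursive member templates are instantiated.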