[mlpack-git] master: fix bug--name conflict (0616233)

gitdub at mlpack.org
Wed Mar 2 13:07:23 EST 2016


Repository : https://github.com/mlpack/mlpack
On branch  : master
Link       : https://github.com/mlpack/mlpack/compare/8ffa4700430ec1b5541333673c7a7d11568c90d0...bc2bd172e1d1d0f5fcbcfafffedecba9d6e47a91

>---------------------------------------------------------------

commit 0616233e730b42b9aeb050d222602ecffaf828a0
Author: stereomatchingkiss <stereomatchingkiss at gmail.com>
Date:   Thu Mar 3 02:07:23 2016 +0800

    fix bug--name conflict


>---------------------------------------------------------------

0616233e730b42b9aeb050d222602ecffaf828a0
 src/mlpack/methods/ann/cnn_impl.hpp | 12 ++++++------
 src/mlpack/methods/ann/ffn_impl.hpp | 12 ++++++------
 src/mlpack/methods/ann/rnn_impl.hpp | 12 ++++++------
 3 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/src/mlpack/methods/ann/cnn_impl.hpp b/src/mlpack/methods/ann/cnn_impl.hpp
index 3b568c4..a4e7733 100644
--- a/src/mlpack/methods/ann/cnn_impl.hpp
+++ b/src/mlpack/methods/ann/cnn_impl.hpp
@@ -46,8 +46,8 @@ CNN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 
   // Train the model.
   Timer::Start("cnn_optimization");
@@ -83,8 +83,8 @@ CNN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 
   Train(predictors, responses);
 }
@@ -112,8 +112,8 @@ CNN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 }
 
 template<typename LayerTypes,
diff --git a/src/mlpack/methods/ann/ffn_impl.hpp b/src/mlpack/methods/ann/ffn_impl.hpp
index 041b301..3301718 100644
--- a/src/mlpack/methods/ann/ffn_impl.hpp
+++ b/src/mlpack/methods/ann/ffn_impl.hpp
@@ -46,8 +46,8 @@ FFN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 
   // Train the model.
   Timer::Start("ffn_optimization");
@@ -83,8 +83,8 @@ FFN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 
   Train(predictors, responses);
 }
@@ -112,8 +112,8 @@ FFN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 }
 
 template<typename LayerTypes,
diff --git a/src/mlpack/methods/ann/rnn_impl.hpp b/src/mlpack/methods/ann/rnn_impl.hpp
index a55915f..743bd4e 100644
--- a/src/mlpack/methods/ann/rnn_impl.hpp
+++ b/src/mlpack/methods/ann/rnn_impl.hpp
@@ -48,8 +48,8 @@ RNN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 
   // Train the model.
   Timer::Start("rnn_optimization");
@@ -87,8 +87,8 @@ RNN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 
   Train(predictors, responses);
 }
@@ -118,8 +118,8 @@ RNN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
                 OutputLayerType>::value,
                 "The type of outputLayer must be OutputLayerType.");
 
-  initializeRule.Initialize(parameter, NetworkSize(network), 1);
-  NetworkWeights(parameter, network);
+  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
+  NetworkWeights(parameter, this->network);
 }
 
 template<typename LayerTypes,
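
The change itself is small: each unqualified use of `network` in the CNN, FFN, and RNN constructor bodies is replaced with `this->network`. The commit message does not spell out where the name conflict comes from, but the pattern is consistent with a constructor parameter that is also named `network` shadowing the class member of the same name, so the unqualified name refers to the (already moved-from) parameter instead of the stored member. The toy class below is not mlpack code, just a minimal sketch of that kind of shadowing and of why qualifying with `this->` selects the member:

#include <iostream>
#include <string>
#include <utility>

// Minimal sketch (hypothetical ToyModel, not the mlpack classes): the
// constructor parameter `network` shadows the member of the same name,
// so an unqualified `network` in the constructor body names the
// parameter, which has already been moved into the member.
class ToyModel
{
 public:
  template<typename NetworkType>
  explicit ToyModel(NetworkType&& network) :
      network(std::forward<NetworkType>(network))
  {
    // Without `this->`, Describe() would receive the moved-from
    // parameter rather than the stored member.
    Describe(this->network);
  }

 private:
  static void Describe(const std::string& network)
  {
    std::cout << "network: " << network << std::endl;
  }

  std::string network;
};

int main()
{
  ToyModel model(std::string("conv -> pool -> softmax"));
}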



