[mlpack-git] master: Swap naming convention. (f9d6162)
gitdub at big.cc.gt.atl.ga.us
Sun May 3 16:15:37 EDT 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/0f31abbdebcd34e2113d8acf47c1d0b087377921...174d2de995a3fe343cd92d158730f3afa03e622d
>---------------------------------------------------------------
commit f9d6162b045c1d381b0c76e39a795034a6cc8a27
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date: Fri May 1 18:16:24 2015 +0200
Swap naming convention.
>---------------------------------------------------------------
f9d6162b045c1d381b0c76e39a795034a6cc8a27
src/mlpack/methods/ann/ffnn.hpp | 34 +++++++++++++++---------------
src/mlpack/methods/ann/rnn.hpp | 46 ++++++++++++++++++++---------------------
2 files changed, 40 insertions(+), 40 deletions(-)
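The change is a pure swap of names: the paired template helpers keep their
bodies but trade identifiers, so that LayerForward/LayerBackward now name the
outer recursion over the network tuple (one step per layer) and
ConnectionForward/ConnectionBackward name the inner recursion over the
connections feeding each layer. Both rely on the same compile-time tuple
iteration idiom. A minimal standalone sketch of that idiom (illustrative only,
not mlpack code): two function templates share a name, and std::enable_if
selects the empty base case once the index I reaches the tuple size.

#include <cstddef>
#include <iostream>
#include <tuple>
#include <type_traits>

// Base case: I == sizeof...(Tp), nothing left to visit.
template<std::size_t I = 0, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
ForEach(std::tuple<Tp...>& /* unused */) { }

// Recursive case: visit element I, then recurse on I + 1.
template<std::size_t I = 0, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
ForEach(std::tuple<Tp...>& t)
{
  std::cout << std::get<I>(t) << '\n';
  ForEach<I + 1, Tp...>(t);
}

int main()
{
  std::tuple<int, double, char> t(1, 2.5, 'x');
  ForEach(t); // Prints 1, 2.5, x in order.
}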
diff --git a/src/mlpack/methods/ann/ffnn.hpp b/src/mlpack/methods/ann/ffnn.hpp
index cc4274c..12cc107 100644
--- a/src/mlpack/methods/ann/ffnn.hpp
+++ b/src/mlpack/methods/ann/ffnn.hpp
@@ -83,7 +83,7 @@ class FFNN
InitLayer(network);
gradientNum = 0;
- ConnectionBackward(network, error);
+ LayerBackward(network, error);
UpdateGradients(network);
}
@@ -116,7 +116,7 @@ class FFNN
std::get<0>(std::get<0>(network)).InputLayer().InputActivation() = input;
- ConnectionForward(network);
+ LayerForward(network);
OutputPrediction(network, output);
}
@@ -136,7 +136,7 @@ class FFNN
std::get<0>(std::get<0>(network)).InputLayer().InputActivation() = input;
- ConnectionForward(network);
+ LayerForward(network);
return OutputError(network, target, error);
}
@@ -194,20 +194,20 @@ class FFNN
*/
template<size_t I = 0, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
- ConnectionForward(std::tuple<Tp...>& /* unused */) { }
+ LayerForward(std::tuple<Tp...>& /* unused */) { }
template<size_t I = 0, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
- ConnectionForward(std::tuple<Tp...>& t)
+ LayerForward(std::tuple<Tp...>& t)
{
- LayerForward(std::get<I>(t));
+ ConnectionForward(std::get<I>(t));
// Use the first connection to perform the feed forward algorithm.
std::get<0>(std::get<I>(t)).OutputLayer().FeedForward(
std::get<0>(std::get<I>(t)).OutputLayer().InputActivation(),
std::get<0>(std::get<I>(t)).OutputLayer().InputActivation());
- ConnectionForward<I + 1, Tp...>(t);
+ LayerForward<I + 1, Tp...>(t);
}
/**
@@ -219,14 +219,14 @@ class FFNN
*/
template<size_t I = 0, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
- LayerForward(std::tuple<Tp...>& /* unused */) { }
+ ConnectionForward(std::tuple<Tp...>& /* unused */) { }
template<size_t I = 0, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
- LayerForward(std::tuple<Tp...>& t)
+ ConnectionForward(std::tuple<Tp...>& t)
{
std::get<I>(t).FeedForward(std::get<I>(t).InputLayer().InputActivation());
- LayerForward<I + 1, Tp...>(t);
+ ConnectionForward<I + 1, Tp...>(t);
}
/*
@@ -273,12 +273,12 @@ class FFNN
*/
template<size_t I = 0, typename VecType, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
- ConnectionBackward(std::tuple<Tp...>& /* unused */, VecType& /* unused */)
+ LayerBackward(std::tuple<Tp...>& /* unused */, VecType& /* unused */)
{ }
template<size_t I = 1, typename VecType, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
- ConnectionBackward(std::tuple<Tp...>& t, VecType& error)
+ LayerBackward(std::tuple<Tp...>& t, VecType& error)
{
// Distinguish between the output layer and the other layer. In case of
// the output layer use specified error vector to store the error and to
@@ -294,10 +294,10 @@ class FFNN
std::get<sizeof...(Tp) - I>(t)).OutputLayer().Delta());
}
- LayerBackward(std::get<sizeof...(Tp) - I>(t), std::get<0>(
+ ConnectionBackward(std::get<sizeof...(Tp) - I>(t), std::get<0>(
std::get<sizeof...(Tp) - I>(t)).OutputLayer().Delta());
- ConnectionBackward<I + 1, VecType, Tp...>(t, error);
+ LayerBackward<I + 1, VecType, Tp...>(t, error);
}
/**
@@ -310,11 +310,11 @@ class FFNN
*/
template<size_t I = 0, typename VecType, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
- LayerBackward(std::tuple<Tp...>& /* unused */, VecType& /* unused */) { }
+ ConnectionBackward(std::tuple<Tp...>& /* unused */, VecType& /* unused */) { }
template<size_t I = 0, typename VecType, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
- LayerBackward(std::tuple<Tp...>& t, VecType& error)
+ ConnectionBackward(std::tuple<Tp...>& t, VecType& error)
{
std::get<I>(t).FeedBackward(error);
@@ -327,7 +327,7 @@ class FFNN
std::get<I>(t).Delta(), std::get<I>(t).InputLayer().Delta());
}
- LayerBackward<I + 1, VecType, Tp...>(t, error);
+ ConnectionBackward<I + 1, VecType, Tp...>(t, error);
}
/**
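The backward helpers above run the same idiom in reverse: the recursion starts
at I = 1 and indexes std::get<sizeof...(Tp) - I>(t), so the tuple is walked
from the last layer back toward the first. A simplified standalone sketch of
the reverse walk (illustrative only, not mlpack code; the FFNN code above
stops before index 0, while the RNN variant below uses the base case
I == sizeof...(Tp) + 1 shown here to reach it):

#include <cstddef>
#include <iostream>
#include <tuple>
#include <type_traits>

// Base case: all sizeof...(Tp) elements have been visited.
template<std::size_t I = 0, typename... Tp>
typename std::enable_if<I == sizeof...(Tp) + 1, void>::type
ForEachReverse(std::tuple<Tp...>& /* unused */) { }

// Recursive case: starts at I = 1 and indexes sizeof...(Tp) - I, so the
// elements are visited back to front. Assumes a non-empty tuple.
template<std::size_t I = 1, typename... Tp>
typename std::enable_if<I < sizeof...(Tp) + 1, void>::type
ForEachReverse(std::tuple<Tp...>& t)
{
  std::cout << std::get<sizeof...(Tp) - I>(t) << '\n';
  ForEachReverse<I + 1, Tp...>(t);
}

int main()
{
  std::tuple<int, double, char> t(1, 2.5, 'x');
  ForEachReverse(t); // Prints x, 2.5, 1.
}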
diff --git a/src/mlpack/methods/ann/rnn.hpp b/src/mlpack/methods/ann/rnn.hpp
index 913a2ad..8efa4d5 100644
--- a/src/mlpack/methods/ann/rnn.hpp
+++ b/src/mlpack/methods/ann/rnn.hpp
@@ -106,7 +106,7 @@ class RNN
seqNum * inputSize, 0, (seqNum + 1) * inputSize - 1, 0);
// Perform the forward pass and calculate the output error.
- ConnectionForward(network);
+ LayerForward(network);
if (seqOutput)
{
arma::colvec seqError = error.unsafe_col(seqNum);
@@ -154,7 +154,7 @@ class RNN
// Perform the backward pass and update the gradient storage.
arma::colvec seqError = error.unsafe_col(seqOutput ? seqNum : 0);
- ConnectionBackward(network, seqError);
+ LayerBackward(network, seqError);
UpdateGradients(network);
// Load the network activation for the upcoming backward pass.
@@ -207,7 +207,7 @@ class RNN
seqNum * inputSize, 0, (seqNum + 1) * inputSize - 1, 0);
// Perform the forward pass and calculate the output error.
- ConnectionForward(network);
+ LayerForward(network);
if (seqOutput)
{
arma::colvec targetCol;
@@ -340,20 +340,20 @@ class RNN
*/
template<size_t I = 0, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
- ConnectionForward(std::tuple<Tp...>& /* unused */) { }
+ LayerForward(std::tuple<Tp...>& /* unused */) { }
template<size_t I = 0, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
- ConnectionForward(std::tuple<Tp...>& t)
+ LayerForward(std::tuple<Tp...>& t)
{
- LayerForward(std::get<I>(t));
+ ConnectionForward(std::get<I>(t));
// Use the first connection to perform the feed forward algorithm.
std::get<0>(std::get<I>(t)).OutputLayer().FeedForward(
std::get<0>(std::get<I>(t)).OutputLayer().InputActivation(),
std::get<0>(std::get<I>(t)).OutputLayer().InputActivation());
- ConnectionForward<I + 1, Tp...>(t);
+ LayerForward<I + 1, Tp...>(t);
}
/**
@@ -365,14 +365,14 @@ class RNN
*/
template<size_t I = 0, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
- LayerForward(std::tuple<Tp...>& /* unused */) { }
+ ConnectionForward(std::tuple<Tp...>& /* unused */) { }
template<size_t I = 0, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
- LayerForward(std::tuple<Tp...>& t)
+ ConnectionForward(std::tuple<Tp...>& t)
{
std::get<I>(t).FeedForward(std::get<I>(t).InputLayer().InputActivation());
- LayerForward<I + 1, Tp...>(t);
+ ConnectionForward<I + 1, Tp...>(t);
}
/*
@@ -426,12 +426,12 @@ class RNN
*/
template<size_t I = 0, typename VecType, typename... Tp>
typename std::enable_if<I == sizeof...(Tp) + 1, void>::type
- ConnectionBackward(std::tuple<Tp...>& /* unused */, VecType& /* unused */)
+ LayerBackward(std::tuple<Tp...>& /* unused */, VecType& /* unused */)
{ }
template<size_t I = 1, typename VecType, typename... Tp>
typename std::enable_if<I < sizeof...(Tp) + 1, void>::type
- ConnectionBackward(std::tuple<Tp...>& t, VecType& error)
+ LayerBackward(std::tuple<Tp...>& t, VecType& error)
{
// Distinguish between the output layer and the other layer. In case of
// the output layer use the specified error vector to store the error and
@@ -445,10 +445,10 @@ class RNN
std::get<0>(std::get<sizeof...(Tp) - I>(t)).OutputLayer().Delta());
}
- LayerBackward(std::get<sizeof...(Tp) - I>(t), std::get<0>(std::get<
+ ConnectionBackward(std::get<sizeof...(Tp) - I>(t), std::get<0>(std::get<
sizeof...(Tp) - I>(t)).OutputLayer().Delta(), I, sizeof...(Tp));
- ConnectionBackward<I + 1, VecType, Tp...>(t, error);
+ LayerBackward<I + 1, VecType, Tp...>(t, error);
}
/**
@@ -461,17 +461,17 @@ class RNN
*/
template<size_t I = 0, typename VecType, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
- LayerBackward(std::tuple<Tp...>& /* unused */,
- VecType& /* unused */,
- const size_t /* unused */,
- const size_t /* unused */) { }
+ ConnectionBackward(std::tuple<Tp...>& /* unused */,
+ VecType& /* unused */,
+ const size_t /* unused */,
+ const size_t /* unused */) { }
template<size_t I = 0, typename VecType, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
- LayerBackward(std::tuple<Tp...>& t,
- VecType& error,
- const size_t layer,
- const size_t layerNum)
+ ConnectionBackward(std::tuple<Tp...>& t,
+ VecType& error,
+ const size_t layer,
+ const size_t layerNum)
{
std::get<I>(t).FeedBackward(error);
@@ -510,7 +510,7 @@ class RNN
delta[deltaNum] = std::get<I>(t).InputLayer().Delta();
}
- LayerBackward<I + 1, VecType, Tp...>(t, error, layer, layerNum);
+ ConnectionBackward<I + 1, VecType, Tp...>(t, error, layer, layerNum);
}
/**
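One further detail of the RNN version: the inner ConnectionBackward threads
two extra runtime arguments (layer and layerNum) through the compile-time
recursion, so each connection knows its position in the unrolled sequence
when it stores its delta. A hedged sketch of that pattern (illustrative only,
with made-up names, not mlpack code):

#include <cstddef>
#include <iostream>
#include <tuple>
#include <type_traits>

// Base case: recursion ends once every element has been visited.
template<std::size_t I = 0, typename... Tp>
typename std::enable_if<I == sizeof...(Tp), void>::type
Visit(std::tuple<Tp...>& /* unused */,
      const std::size_t /* unused */,
      const std::size_t /* unused */) { }

// Recursive case: the runtime context (layer, layerNum) is passed along
// unchanged, so every element sees the same positional information.
template<std::size_t I = 0, typename... Tp>
typename std::enable_if<I < sizeof...(Tp), void>::type
Visit(std::tuple<Tp...>& t, const std::size_t layer, const std::size_t layerNum)
{
  std::cout << "layer " << layer << " of " << layerNum << ": "
            << std::get<I>(t) << '\n';
  Visit<I + 1, Tp...>(t, layer, layerNum);
}

int main()
{
  std::tuple<int, char> t(7, 'y');
  Visit(t, 2, 5);
}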