[mlpack-git] master: Remove unused function; we only need to support 3rd-order tensors. (4618086)
gitdub at big.cc.gt.atl.ga.us
Thu Sep 3 08:35:39 EDT 2015
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/91ae1062772a0f2bbca9a072769629c2d775ae64...42d61dfdbc9b0cbce59398e67ea58544b0fa1919
>---------------------------------------------------------------
commit 461808614243e4dd0accb4184bd7a81d4adb8a08
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date: Thu Sep 3 14:23:50 2015 +0200
Remove unused function; we only need to support 3rd-order tensors.
>---------------------------------------------------------------
461808614243e4dd0accb4184bd7a81d4adb8a08
src/mlpack/methods/ann/layer/pooling_layer.hpp | 17 -----------------
1 file changed, 17 deletions(-)
diff --git a/src/mlpack/methods/ann/layer/pooling_layer.hpp b/src/mlpack/methods/ann/layer/pooling_layer.hpp
index 7a85a83..a801c78 100644
--- a/src/mlpack/methods/ann/layer/pooling_layer.hpp
+++ b/src/mlpack/methods/ann/layer/pooling_layer.hpp
@@ -75,23 +75,6 @@ class PoolingLayer
Pooling(input.slice(s), output.slice(s));
}
- /*
- * Ordinary feed backward pass of a neural network, calculating the function
- * f(x) by propagating x backwards through f. Using the results from the feed
- * forward pass.
- *
- * @param input The propagated input activation.
- * @param gy The backpropagated error.
- * @param g The calculated gradient.
- */
- template<typename eT>
- void Backward(const arma::Mat<eT>& input,
- const arma::Mat<eT>& gy,
- arma::Mat<eT>& g)
- {
- Unpooling(input, gy, g);
- }
-
/**
* Ordinary feed backward pass of a neural network, using 3rd-order tensors as
* input, calculating the function f(x) by propagating x backwards through f.
More information about the mlpack-git mailing list