[mlpack-git] master: Reorder the parameter of the DropConnectLayer to avoid reorder warning. (592fdcd)
gitdub at mlpack.org
gitdub at mlpack.org
Sat Apr 9 12:08:09 EDT 2016
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/0e8d776e03b8dbe8e605063b388115cb22b1860d...592fdcda156aa58aba3209017b585e7cfd12c345
>---------------------------------------------------------------
commit 592fdcda156aa58aba3209017b585e7cfd12c345
Author: Marcus Edel <marcus.edel at fu-berlin.de>
Date: Sat Apr 9 18:08:09 2016 +0200
Reorder the parameter of the DropConnectLayer to avoid reorder warning.
>---------------------------------------------------------------
592fdcda156aa58aba3209017b585e7cfd12c345
src/mlpack/methods/ann/layer/dropconnect_layer.hpp | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/mlpack/methods/ann/layer/dropconnect_layer.hpp b/src/mlpack/methods/ann/layer/dropconnect_layer.hpp
index 2c9a651..bd8fe6e 100644
--- a/src/mlpack/methods/ann/layer/dropconnect_layer.hpp
+++ b/src/mlpack/methods/ann/layer/dropconnect_layer.hpp
@@ -304,6 +304,9 @@ class DropConnectLayer
}
private:
+ //! Locally-stored layer object.
+ InputLayer baseLayer;
+
//! Locally stored number of input units.
size_t inSize;
@@ -313,6 +316,9 @@ private:
//! The probability of setting a value to zero.
double ratio;
+ //! The scale fraction.
+ double scale;
+
//! If true the default layer is used otherwise a new layer will be created.
bool uselayer;
@@ -322,9 +328,6 @@ private:
//! Locally-stored delta object.
OutputDataType delta;
- //! Locally-stored layer object.
- InputLayer baseLayer;
-
//! Locally-stored gradient object.
OutputDataType gradient;
@@ -337,9 +340,6 @@ private:
//! Locally-stored mask object.
OutputDataType mask;
- //! The scale fraction.
- double scale;
-
//! If true dropout and scaling is disabled, see notes above.
bool deterministic;
More information about the mlpack-git
mailing list