[mlpack-git] master: Minor style fixes. (3fbabd7)
gitdub at mlpack.org
gitdub at mlpack.org
Fri Mar 18 08:21:10 EDT 2016
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/80943dd398d652aa5ccb8461726a710d04fae925...322ff1c0622c7574800014f21d8a537c68101b5f
>---------------------------------------------------------------
commit 3fbabd79fe00a6fe32af5dde40c435c86b4000b5
Author: marcus <marcus.edel at fu-berlin.de>
Date: Fri Mar 18 13:21:10 2016 +0100
Minor style fixes.
>---------------------------------------------------------------
3fbabd79fe00a6fe32af5dde40c435c86b4000b5
src/mlpack/core/optimizers/CMakeLists.txt | 2 ++
src/mlpack/core/optimizers/adadelta/ada_delta.hpp | 19 ++++++++++++++-----
.../core/optimizers/adadelta/ada_delta_impl.hpp | 15 +++++++++++----
3 files changed, 27 insertions(+), 9 deletions(-)
diff --git a/src/mlpack/core/optimizers/CMakeLists.txt b/src/mlpack/core/optimizers/CMakeLists.txt
index 3e16f02..c5163da 100644
--- a/src/mlpack/core/optimizers/CMakeLists.txt
+++ b/src/mlpack/core/optimizers/CMakeLists.txt
@@ -1,4 +1,6 @@
set(DIRS
+ adadelta
+ adam
aug_lagrangian
lbfgs
minibatch_sgd
diff --git a/src/mlpack/core/optimizers/adadelta/ada_delta.hpp b/src/mlpack/core/optimizers/adadelta/ada_delta.hpp
index dbe5886..067a4bf 100644
--- a/src/mlpack/core/optimizers/adadelta/ada_delta.hpp
+++ b/src/mlpack/core/optimizers/adadelta/ada_delta.hpp
@@ -1,3 +1,12 @@
+/**
+ * @file ada_delta.hpp
+ * @author Ryan Curtin
+ * @author Vasanth Kalingeri
+ *
+ * Implementation of the Adadelta optimizer. Adadelta is an optimizer that
+ * dynamically adapts over time using only first order information.
+ * Additionally, Adadelta requires no manual tuning of a learning rate.
+ */
#ifndef __MLPACK_CORE_OPTIMIZERS_ADADELTA_ADA_DELTA_HPP
#define __MLPACK_CORE_OPTIMIZERS_ADADELTA_ADA_DELTA_HPP
@@ -50,9 +59,9 @@ class AdaDelta
{
public:
/**
- * Construct the AdaDelta optimizer with the given function and parameters. The
- * defaults here are not necessarily good for the given problem, so it is
- * suggested that the values used be tailored to the task at hand. The
+ * Construct the AdaDelta optimizer with the given function and parameters.
+ * The defaults here are not necessarily good for the given problem, so it is
+ * suggested that the values used be tailored to the task at hand. The
* maximum number of iterations refers to the maximum number of points that
* are processed (i.e., one iteration equals one point; one iteration does not
* equal one pass over the dataset).
@@ -74,8 +83,8 @@ class AdaDelta
const bool shuffle = true);
/**
- * Optimize the given function using AdaDelta. The given starting point will be
- * modified to store the finishing point of the algorithm, and the final
+ * Optimize the given function using AdaDelta. The given starting point will
+ * be modified to store the finishing point of the algorithm, and the final
* objective value is returned.
*
* @param iterate Starting point (will be modified).
diff --git a/src/mlpack/core/optimizers/adadelta/ada_delta_impl.hpp b/src/mlpack/core/optimizers/adadelta/ada_delta_impl.hpp
index ac08a62..41b3d02 100644
--- a/src/mlpack/core/optimizers/adadelta/ada_delta_impl.hpp
+++ b/src/mlpack/core/optimizers/adadelta/ada_delta_impl.hpp
@@ -1,3 +1,10 @@
+/**
+ * @file ada_delta_impl.hpp
+ * @author Ryan Curtin
+ * @author Vasanth Kalingeri
+ *
+ * Implementation of the Adadelta optimizer.
+ */
#ifndef __MLPACK_CORE_OPTIMIZERS_ADADELTA_ADA_DELTA_IMPL_HPP
#define __MLPACK_CORE_OPTIMIZERS_ADADELTA_ADA_DELTA_IMPL_HPP
@@ -73,8 +80,8 @@ double AdaDelta<DecomposableFunctionType>::Optimize(arma::mat& iterate)
if (std::abs(lastObjective - overallObjective) < tolerance)
{
- Log::Info << "AdaDelta: minimized within tolerance " << tolerance << "; "
- << "terminating optimization." << std::endl;
+ Log::Info << "AdaDelta: minimized within tolerance " << tolerance
+ << "; terminating optimization." << std::endl;
return overallObjective;
}
@@ -114,8 +121,8 @@ double AdaDelta<DecomposableFunctionType>::Optimize(arma::mat& iterate)
overallObjective += function.Evaluate(iterate, currentFunction);
}
- Log::Info << "AdaDelta: maximum iterations (" << maxIterations << ") reached; "
- << "terminating optimization." << std::endl;
+ Log::Info << "AdaDelta: maximum iterations (" << maxIterations
+ << ") reached; terminating optimization." << std::endl;
// Calculate final objective.
overallObjective = 0;
for (size_t i = 0; i < numFunctions; ++i)
More information about the mlpack-git mailing list