[mlpack-git] master: add duplicate option check and drop unused functions (b86e427)
gitdub at mlpack.org
Tue Jun 7 00:28:37 EDT 2016
Repository : https://github.com/mlpack/mlpack
On branch : master
Link : https://github.com/mlpack/mlpack/compare/4fa39b6ab0baa1428116d0406264b5452e716d06...97402b9098d9d72889aa795923cf8fd67a4d87bf
>---------------------------------------------------------------
commit b86e427e4c7fe24bd4dca652fb561ff6f986cf80
Author: Keon Kim <kwk236 at gmail.com>
Date: Tue Jun 7 13:28:37 2016 +0900
add duplicate option check and drop unused functions
>---------------------------------------------------------------
b86e427e4c7fe24bd4dca652fb561ff6f986cf80
.gitignore | 3 +++
src/mlpack/core/util/cli.cpp | 37 ++-----------------------------------
src/mlpack/core/util/cli.hpp | 18 +-----------------
src/mlpack/core/util/cli_impl.hpp | 25 ++++++++++++++++++++++---
src/mlpack/methods/ann/ffn.hpp | 24 ++++++++++++------------
src/mlpack/methods/ann/rnn.hpp | 20 ++++++++++----------
6 files changed, 50 insertions(+), 77 deletions(-)
diff --git a/.gitignore b/.gitignore
index 19b7551..64d4c03 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,3 +6,6 @@ build*
*.bak
src/mlpack/core/util/gitversion.hpp
src/mlpack/core/util/arma_config.hpp
+
+.idea
+
diff --git a/src/mlpack/core/util/cli.cpp b/src/mlpack/core/util/cli.cpp
index 626e5af..a9a353a 100644
--- a/src/mlpack/core/util/cli.cpp
+++ b/src/mlpack/core/util/cli.cpp
@@ -6,16 +6,12 @@
*/
#include <list>
#include <boost/program_options.hpp>
-#include <boost/any.hpp>
#include <boost/scoped_ptr.hpp>
#include <iostream>
-#include <string>
#include "cli.hpp"
#include "log.hpp"
-#include "option.hpp"
-
using namespace mlpack;
using namespace mlpack::util;
@@ -108,7 +104,8 @@ void CLI::Add(const std::string& identifier,
po::options_description& desc = CLI::GetSingleton().desc;
// Must make use of boost option name syntax.
- std::string progOptId = alias.length() ? identifier + "," + alias : identifier;
+ std::string progOptId =
+ alias.length() ? identifier + "," + alias : identifier;
// Deal with a required alias.
AddAlias(alias, identifier);
@@ -453,36 +450,6 @@ void CLI::RemoveDuplicateFlags(po::basic_parsed_options<char>& bpo)
}
}
-/**
- * Parses a stream for arguments
- *
- * @param stream The stream to be parsed.
- */
-void CLI::ParseStream(std::istream& stream)
-{
- po::variables_map& vmap = GetSingleton().vmap;
- po::options_description& desc = GetSingleton().desc;
-
- // Parse the stream; place options & values into vmap.
- try
- {
- po::store(po::parse_config_file(stream, desc), vmap);
- }
- catch (std::exception& ex)
- {
- Log::Fatal << ex.what() << std::endl;
- }
-
- // Flush the buffer; make sure changes are propagated to vmap.
- po::notify(vmap);
-
- UpdateGmap();
- DefaultMessages();
- RequiredOptions();
-
- Timer::Start("total_time");
-}
-
/* Prints out the current hierarchy. */
void CLI::Print()
{
diff --git a/src/mlpack/core/util/cli.hpp b/src/mlpack/core/util/cli.hpp
index f6ec7dc..8f9cf3f 100644
--- a/src/mlpack/core/util/cli.hpp
+++ b/src/mlpack/core/util/cli.hpp
@@ -635,13 +635,6 @@ class CLI
*/
static void RemoveDuplicateFlags(po::basic_parsed_options<char>& bpo);
- /**
- * Parses a stream for arguments.
- *
- * @param stream The stream to be parsed.
- */
- static void ParseStream(std::istream& stream);
-
/**
* Print out the current hierarchy.
*/
@@ -673,7 +666,7 @@ class CLI
//! Values of the options given by user.
po::variables_map vmap;
- //! Pathnames of required options.
+ //! Identifier names of required options.
std::list<std::string> requiredOptions;
//! Map of global values.
@@ -729,15 +722,6 @@ class CLI
static void RequiredOptions();
/**
- * Cleans up input pathnames, rendering strings such as /foo/bar
- * and foo/bar/ equivalent inputs.
- *
- * @param str Input string.
- * @return Sanitized string.
- */
- static std::string SanitizeString(const std::string& str);
-
- /**
* Parses the values given on the command line, overriding any default values.
*/
static void UpdateGmap();
diff --git a/src/mlpack/core/util/cli_impl.hpp b/src/mlpack/core/util/cli_impl.hpp
index 1960618..4e8638a 100644
--- a/src/mlpack/core/util/cli_impl.hpp
+++ b/src/mlpack/core/util/cli_impl.hpp
@@ -9,10 +9,20 @@
// In case it has not already been included.
#include "cli.hpp"
+#include "prefixedoutstream.hpp"
// Include option.hpp here because it requires CLI but is also templated.
#include "option.hpp"
+// Color code escape sequences.
+#ifndef _WIN32
+ #define BASH_RED "\033[0;31m"
+ #define BASH_CLEAR "\033[0m"
+#else
+ #define BASH_RED ""
+ #define BASH_CLEAR ""
+#endif
+
namespace mlpack {
/**
@@ -33,10 +43,21 @@ void CLI::Add(const std::string& identifier,
const std::string& alias,
bool required)
{
+ util::PrefixedOutStream outstr(std::cerr,
+ BASH_RED "[FATAL] " BASH_CLEAR, false, true /* fatal */);
+ gmap_t& gmap = GetSingleton().globalValues;
+ amap_t& amap = GetSingleton().aliasValues;
+ if (gmap.count(identifier))
+ outstr << "Parameter --" << identifier << "(-" << alias << ") "
+ << "is defined multiple times with same identifiers." << std::endl;
+ if (amap.count(alias))
+ outstr << "Parameter --" << identifier << "(-" << alias << ") "
+ << "is defined multiple times with same alias." << std::endl;
po::options_description& desc = CLI::GetSingleton().desc;
// Must make use of boost syntax here.
- std::string progOptId = alias.length() ? identifier + "," + alias : identifier;
+ std::string progOptId =
+ alias.length() ? identifier + "," + alias : identifier;
// Add the alias, if necessary
AddAlias(alias, identifier);
@@ -45,8 +66,6 @@ void CLI::Add(const std::string& identifier,
desc.add_options()(progOptId.c_str(), po::value<T>(), description.c_str());
// Make sure the appropriate metadata is inserted into gmap.
- gmap_t& gmap = GetSingleton().globalValues;
-
ParamData data;
T tmp = T();
diff --git a/src/mlpack/methods/ann/ffn.hpp b/src/mlpack/methods/ann/ffn.hpp
index 3de7252..b06fb14 100644
--- a/src/mlpack/methods/ann/ffn.hpp
+++ b/src/mlpack/methods/ann/ffn.hpp
@@ -22,7 +22,7 @@ namespace ann /** Artificial Neural Network. */ {
* Implementation of a standard feed forward network.
*
* @tparam LayerTypes Contains all layer modules used to construct the network.
- * @tparam OutputLayerType The outputlayer type used to evaluate the network.
+ * @tparam OutputLayerType The output layer type used to evaluate the network.
* @tparam InitializationRuleType Rule used to initialize the weight matrix.
* @tparam PerformanceFunction Performance strategy used to calculate the error.
*/
@@ -48,14 +48,14 @@ class FFN
* be used.
*
* @param network Network modules used to construct the network.
- * @param outputLayer Outputlayer used to evaluate the network.
+ * @param outputLayer Output layer used to evaluate the network.
* @param predictors Input training variables.
* @param responses Outputs resulting from input training variables.
* @param optimizer Instantiated optimizer used to train the model.
* @param initializeRule Optional instantiated InitializationRule object
- * for initializing the network paramter.
+ * for initializing the network parameter.
* @param performanceFunction Optional instantiated PerformanceFunction
- * object used to claculate the error.
+ * object used to calculate the error.
*/
template<typename LayerType,
typename OutputType,
@@ -74,13 +74,13 @@ class FFN
* initialize rule and performance function should be used.
*
* @param network Network modules used to construct the network.
- * @param outputLayer Outputlayer used to evaluate the network.
+ * @param outputLayer Output layer used to evaluate the network.
* @param predictors Input training variables.
* @param responses Outputs resulting from input training variables.
* @param initializeRule Optional instantiated InitializationRule object
- * for initializing the network paramter.
+ * for initializing the network parameter.
* @param performanceFunction Optional instantiated PerformanceFunction
- * object used to claculate the error.
+ * object used to calculate the error.
*/
template<typename LayerType, typename OutputType>
FFN(LayerType &&network,
@@ -96,11 +96,11 @@ class FFN
* training.
*
* @param network Network modules used to construct the network.
- * @param outputLayer Outputlayer used to evaluate the network.
+ * @param outputLayer Output layer used to evaluate the network.
* @param initializeRule Optional instantiated InitializationRule object
- * for initializing the network paramter.
+ * for initializing the network parameter.
* @param performanceFunction Optional instantiated PerformanceFunction
- * object used to claculate the error.
+ * object used to calculate the error.
*/
template<typename LayerType, typename OutputType>
FFN(LayerType &&network,
@@ -408,10 +408,10 @@ private:
//! Instantiated feedforward network.
LayerTypes network;
- //! The outputlayer used to evaluate the network
+ //! The output layer used to evaluate the network
OutputLayerType outputLayer;
- //! Performance strategy used to claculate the error.
+ //! Performance strategy used to calculate the error.
PerformanceFunction performanceFunc;
//! The current evaluation mode (training or testing).
diff --git a/src/mlpack/methods/ann/rnn.hpp b/src/mlpack/methods/ann/rnn.hpp
index 473f12e..39789bf 100644
--- a/src/mlpack/methods/ann/rnn.hpp
+++ b/src/mlpack/methods/ann/rnn.hpp
@@ -24,7 +24,7 @@ namespace ann /** Artificial Neural Network. */ {
* Implementation of a standard recurrent neural network.
*
* @tparam LayerTypes Contains all layer modules used to construct the network.
- * @tparam OutputLayerType The outputlayer type used to evaluate the network.
+ * @tparam OutputLayerType The output layer type used to evaluate the network.
* @tparam InitializationRuleType Rule used to initialize the weight matrix.
* @tparam PerformanceFunction Performance strategy used to calculate the error.
*/
@@ -50,14 +50,14 @@ class RNN
* be used.
*
* @param network Network modules used to construct the network.
- * @param outputLayer Outputlayer used to evaluate the network.
+ * @param outputLayer Output layer used to evaluate the network.
* @param predictors Input training variables.
* @param responses Outputs resulting from input training variables.
* @param optimizer Instantiated optimizer used to train the model.
* @param initializeRule Optional instantiated InitializationRule object
- * for initializing the network paramter.
+ * for initializing the network parameter.
* @param performanceFunction Optional instantiated PerformanceFunction
- * object used to claculate the error.
+ * object used to calculate the error.
*/
template<typename LayerType,
typename OutputType,
@@ -76,13 +76,13 @@ class RNN
* initialize rule and performance function should be used.
*
* @param network Network modules used to construct the network.
- * @param outputLayer Outputlayer used to evaluate the network.
+ * @param outputLayer Output layer used to evaluate the network.
* @param predictors Input training variables.
* @param responses Outputs resulting from input training variables.
* @param initializeRule Optional instantiated InitializationRule object
- * for initializing the network paramter.
+ * for initializing the network parameter.
* @param performanceFunction Optional instantiated PerformanceFunction
- * object used to claculate the error.
+ * object used to calculate the error.
*/
template<typename LayerType, typename OutputType>
RNN(LayerType &&network,
@@ -98,11 +98,11 @@ class RNN
* training.
*
* @param network Network modules used to construct the network.
- * @param outputLayer Outputlayer used to evaluate the network.
+ * @param outputLayer Output layer used to evaluate the network.
* @param initializeRule Optional instantiated InitializationRule object
- * for initializing the network paramter.
+ * for initializing the network parameter.
* @param performanceFunction Optional instantiated PerformanceFunction
- * object used to claculate the error.
+ * object used to calculate the error.
*/
template<typename LayerType, typename OutputType>
RNN(LayerType &&network,
More information about the mlpack-git mailing list