[mlpack-svn] r15524 - in mlpack/conf/jenkins-conf/benchmark/methods: mlpy shogun
fastlab-svn at coffeetalk-1.cc.gatech.edu
fastlab-svn at coffeetalk-1.cc.gatech.edu
Mon Jul 22 10:38:38 EDT 2013
Author: marcus
Date: Mon Jul 22 10:38:38 2013
New Revision: 15524
Log:
Clean up shogun scripts.
Modified:
mlpack/conf/jenkins-conf/benchmark/methods/mlpy/lars.py
mlpack/conf/jenkins-conf/benchmark/methods/shogun/allknn.py
mlpack/conf/jenkins-conf/benchmark/methods/shogun/gmm.py
mlpack/conf/jenkins-conf/benchmark/methods/shogun/kernel_pca.py
mlpack/conf/jenkins-conf/benchmark/methods/shogun/kmeans.py
mlpack/conf/jenkins-conf/benchmark/methods/shogun/lars.py
mlpack/conf/jenkins-conf/benchmark/methods/shogun/linear_regression.py
mlpack/conf/jenkins-conf/benchmark/methods/shogun/nbc.py
mlpack/conf/jenkins-conf/benchmark/methods/shogun/pca.py
Modified: mlpack/conf/jenkins-conf/benchmark/methods/mlpy/lars.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/mlpy/lars.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/mlpy/lars.py Mon Jul 22 10:38:38 2013
@@ -69,7 +69,7 @@
def RunMethod(self, options):
Log.Info("Perform LARS.", self.verbose)
- if len(self.dataset) < 2:
+ if len(self.dataset) != 2:
Log.Fatal("This method requires two datasets.")
return -1
Modified: mlpack/conf/jenkins-conf/benchmark/methods/shogun/allknn.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/shogun/allknn.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/shogun/allknn.py Mon Jul 22 10:38:38 2013
@@ -40,12 +40,6 @@
self.dataset = dataset
'''
- Destructor to clean up at the end.
- '''
- def __del__(self):
- pass
-
- '''
Use the shogun library to implement All K-Nearest-Neighbors.
@param options - Extra options for the method.
@@ -55,7 +49,7 @@
totalTimer = Timer()
# Load input dataset.
- # If the dataset contains two files then the second file is the query file
+ # If the dataset contains two files then the second file is the query file.
# In this case we add this to the command line.
Log.Info("Loading dataset", self.verbose)
if len(self.dataset) == 2:
Modified: mlpack/conf/jenkins-conf/benchmark/methods/shogun/gmm.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/shogun/gmm.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/shogun/gmm.py Mon Jul 22 10:38:38 2013
@@ -39,12 +39,6 @@
self.dataset = dataset
'''
- Destructor to clean up at the end.
- '''
- def __del__(self):
- pass
-
- '''
Use the shogun library to implement Gaussian Mixture Model.
@param options - Extra options for the method.
@@ -69,7 +63,7 @@
model = Clustering.GMM(g)
model.set_features(dataFeat)
with totalTimer:
- model.train_em(max_iter=n)
+ model.train_em(1e-9, n, 1e-9)
return totalTimer.ElapsedTime()
Modified: mlpack/conf/jenkins-conf/benchmark/methods/shogun/kernel_pca.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/shogun/kernel_pca.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/shogun/kernel_pca.py Mon Jul 22 10:38:38 2013
@@ -40,12 +40,6 @@
self.dataset = dataset
'''
- Destructor to clean up at the end.
- '''
- def __del__(self):
- pass
-
- '''
Use the shogun library to implement Kernel Principal Components Analysis.
@param options - Extra options for the method.
@@ -79,10 +73,7 @@
return -1
elif kernel.group(1) == "polynomial":
degree = re.search('-D (\d+)', options)
- if not degree:
- degree = 1
- else:
- degree = int(degree.group(1))
+ degree = 1 if not degree else int(degree.group(1))
kernel = PolyKernel(dataFeat, dataFeat, degree, True)
elif kernel.group(1) == "gaussian":
Modified: mlpack/conf/jenkins-conf/benchmark/methods/shogun/kmeans.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/shogun/kmeans.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/shogun/kmeans.py Mon Jul 22 10:38:38 2013
@@ -40,12 +40,6 @@
self.dataset = dataset
'''
- Destructor to clean up at the end.
- '''
- def __del__(self):
- pass
-
- '''
Use the shogun library to implement K-Means Clustering.
@param options - Extra options for the method.
@@ -166,5 +160,4 @@
@return Elapsed time in seconds.
'''
def GetTime(self, timer):
- time = timer.total_time
- return time
+ return timer.total_time
Modified: mlpack/conf/jenkins-conf/benchmark/methods/shogun/lars.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/shogun/lars.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/shogun/lars.py Mon Jul 22 10:38:38 2013
@@ -39,12 +39,6 @@
self.dataset = dataset
'''
- Destructor to clean up at the end.
- '''
- def __del__(self):
- pass
-
- '''
Use the shogun library to implement Least Angle Regression.
@param options - Extra options for the method.
@@ -62,10 +56,7 @@
# Get all the parameters.
lambda1 = re.search("-l (\d+)", options)
- if not lambda1:
- lambda1 = 0.0
- else:
- lambda1 = int(lambda1.group(1))
+ lambda1 = 0.0 if not lambda1 else int(lambda1.group(1))
with totalTimer:
# Perform LARS.
@@ -87,8 +78,8 @@
def RunMethod(self, options):
Log.Info("Perform LARS.", self.verbose)
- if len(self.dataset) < 2:
- Log.Fatal("The method need two datasets.")
+ if len(self.dataset) != 2:
+ Log.Fatal("This method requires two datasets.")
return -1
return self.LARSShogun(options)
Modified: mlpack/conf/jenkins-conf/benchmark/methods/shogun/linear_regression.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/shogun/linear_regression.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/shogun/linear_regression.py Mon Jul 22 10:38:38 2013
@@ -39,12 +39,6 @@
self.dataset = dataset
'''
- Destructor to clean up at the end.
- '''
- def __del__(self):
- pass
-
- '''
Use the shogun library to implement Linear Regression.
@param options - Extra options for the method.
Modified: mlpack/conf/jenkins-conf/benchmark/methods/shogun/nbc.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/shogun/nbc.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/shogun/nbc.py Mon Jul 22 10:38:38 2013
@@ -39,12 +39,6 @@
self.dataset = dataset
'''
- Destructor to clean up at the end.
- '''
- def __del__(self):
- pass
-
- '''
Use the shogun library to implement Naive Bayes Classifier.
@param options - Extra options for the method.
@@ -84,8 +78,8 @@
def RunMethod(self, options):
Log.Info("Perform NBC.", self.verbose)
- if len(self.dataset) < 2:
- Log.Fatal("The method need two datasets.")
+ if len(self.dataset) != 2:
+ Log.Fatal("This method requires two datasets.")
return -1
return self.NBCShogun(options)
Modified: mlpack/conf/jenkins-conf/benchmark/methods/shogun/pca.py
==============================================================================
--- mlpack/conf/jenkins-conf/benchmark/methods/shogun/pca.py (original)
+++ mlpack/conf/jenkins-conf/benchmark/methods/shogun/pca.py Mon Jul 22 10:38:38 2013
@@ -39,12 +39,6 @@
self.dataset = dataset
'''
- Destructor to clean up at the end.
- '''
- def __del__(self):
- pass
-
- '''
Use the shogun library to implement Principal Components Analysis.
@param options - Extra options for the method.
More information about the mlpack-svn
mailing list