# classifiers.py
from sklearn.naive_bayes import MultinomialNB
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import SGDClassifier
from sklearn.neighbors import KNeighborsClassifier
import numpy as np

# Candidate classifiers, each paired with the short name used to look up its parameter grid below.
classifiers = [
    ('bayes', MultinomialNB()),
    ('svm', SVC()),
    ('decisionTree', DecisionTreeClassifier()),
    ('rfc', RandomForestClassifier()),
    ('lr', LogisticRegression()),
    ('sgd', SGDClassifier()),
    ('knn', KNeighborsClassifier()),
]
# Hyper-parameter grids for grid search, keyed by the same short names as above.
param_grid_svm = {'C': [1, 10, 100, 1000], 'gamma': [1, 0.1, 0.001, 0.0001], 'kernel': ['linear', 'rbf']}
param_grid_decisionTree = {'criterion': ['gini', 'entropy'], 'max_depth': range(5, 10), 'min_samples_split': range(5, 10), 'min_samples_leaf': range(1, 5)}
# 'auto' was removed from max_features in recent scikit-learn releases; 'sqrt' is the classifier equivalent.
param_grid_rfc = {'n_estimators': [200, 500], 'max_features': ['sqrt', 'log2'], 'max_depth': [4, 5, 6, 7, 8], 'criterion': ['gini', 'entropy']}
# The 'l1' penalty needs a compatible solver (the default 'lbfgs' only supports 'l2').
param_grid_lr = {'C': np.logspace(-3, 3, 7), 'penalty': ['l1', 'l2'], 'solver': ['liblinear']}
# 'log' loss is named 'log_loss' since scikit-learn 1.1, and the penalty 'none' must be passed as None.
param_grid_sgd = {'loss': ['hinge', 'log_loss', 'squared_hinge', 'modified_huber'], 'alpha': [0.0001, 0.001, 0.01, 0.1], 'penalty': ['l2', 'l1', None], 'max_iter': [500]}
param_grid_knn = {'n_neighbors': list(range(1, 20)), 'weights': ['uniform', 'distance'], 'metric': ['euclidean', 'manhattan']}

# Parameter grids paired with the classifier names above (None means no grid search for that model).
grid_params = [
    ('bayes', None),
    ('svm', param_grid_svm),
    ('decisionTree', param_grid_decisionTree),
    ('rfc', param_grid_rfc),
    ('lr', param_grid_lr),
    ('sgd', param_grid_sgd),
    ('knn', param_grid_knn),
]
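

# --- Usage sketch -------------------------------------------------------------------
# One plausible way these two lists could be consumed (an assumption, not part of the
# original file): iterate over `classifiers`, look up the matching grid in `grid_params`,
# and run a GridSearchCV when a grid is defined. `X`, `y`, the cv value and the scoring
# metric are hypothetical placeholders.
from sklearn.model_selection import GridSearchCV

def fit_all(X, y, cv=5, scoring='f1_macro'):
    """Fit every classifier, using GridSearchCV for those that define a parameter grid."""
    grids = dict(grid_params)  # name -> parameter grid (or None)
    fitted = {}
    for name, clf in classifiers:
        grid = grids.get(name)
        if grid is None:
            # No grid (e.g. 'bayes'): fit the estimator with its default parameters.
            fitted[name] = clf.fit(X, y)
        else:
            search = GridSearchCV(clf, grid, cv=cv, scoring=scoring, n_jobs=-1)
            search.fit(X, y)
            fitted[name] = search.best_estimator_
    return fitted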