added file containing model names and hyperparams to run for all models, incl. FS
This commit is contained in:
parent 9c07ad3ce8
commit 5d6dccfc09

6 changed files with 536 additions and 299 deletions

classification_params_FS.py  480  Normal file

@@ -0,0 +1,480 @@
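# ----------------------------------------------------------------------
# Minimal setup sketch: the sections below use Pipeline, MinMaxScaler,
# RFECV, the individual classifiers, np, and the shared objects rs, njobs,
# cv and rskf_cv, all of which this file expects the calling script to
# provide. The imports and placeholder values here are assumptions for
# illustration, not taken from the committed file.
import numpy as np

from sklearn.pipeline import Pipeline
from sklearn.preprocessing import MinMaxScaler
from sklearn.feature_selection import RFECV
from sklearn.model_selection import StratifiedKFold, RepeatedStratifiedKFold

from sklearn.ensemble import (AdaBoostClassifier, BaggingClassifier,
                              GradientBoostingClassifier, RandomForestClassifier)
from sklearn.naive_bayes import BernoulliNB, GaussianNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import (RBF, DotProduct, Matern,
                                              RationalQuadratic, WhiteKernel)
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LogisticRegression, RidgeClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
from sklearn.svm import SVC
from xgboost import XGBClassifier

rs = {'random_state': 42}   # assumed: shared random-state kwargs
njobs = {'n_jobs': 10}      # assumed: shared n_jobs kwargs
cv = StratifiedKFold(n_splits = 10, shuffle = True, **rs)              # assumed CV splitter
rskf_cv = RepeatedStratifiedKFold(n_splits = 10, n_repeats = 3, **rs)  # assumed CV splitter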
########################################################################
#======================
# AdaBoostClassifier()
#======================
# Define estimator
estimator = AdaBoostClassifier(**rs)

# Define pipeline with steps
pipe_abc = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
#    , ('clf', AdaBoostClassifier(**rs))])
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_abc = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [AdaBoostClassifier(**rs)],
        'clf__n_estimators': [1, 2, 5, 10]
#        , 'clf__base_estimator': ['SVC']
#        , 'clf__splitter': ["best", "random"]
    }
]

########################################################################
#======================
# BaggingClassifier()
#======================
# Define estimator
estimator = BaggingClassifier(**rs
                              , **njobs
                              , bootstrap = True
                              , oob_score = True)

# Define pipeline with steps
pipe_bc = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_bc = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [BaggingClassifier(**rs, **njobs, bootstrap = True, oob_score = True)],
        'clf__n_estimators': [10, 25, 50, 100, 150, 200, 500, 700, 1000]
#        , 'clf__base_estimator': [None, SVC(), KNeighborsClassifier()]  # if None, a DecisionTree is used
    }
]

########################################################################
#======================
# BernoulliNB()
#======================
# Define estimator
estimator = BernoulliNB()

# Define pipeline with steps
pipe_bnb = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_bnb = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [BernoulliNB()],
        'clf__alpha': [1, 0]
        , 'clf__binarize': [None, 0]
        , 'clf__fit_prior': [True]
        , 'clf__class_prior': [None]
    }
]

########################################################################
#===========================
# DecisionTreeClassifier()
#===========================
# Define estimator
estimator = DecisionTreeClassifier(**rs)

# Define pipeline with steps
pipe_dt = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_dt = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [DecisionTreeClassifier(**rs)],
        'clf__max_depth': [None, 2, 4, 6, 8, 10, 12, 16, 20]
        , 'clf__class_weight': ['balanced']
        , 'clf__criterion': ['gini', 'entropy', 'log_loss']
        , 'clf__max_features': [None, 'sqrt', 'log2']
        , 'clf__min_samples_leaf': [1, 2, 3, 4, 5, 10]
        , 'clf__min_samples_split': [2, 5, 15, 20]
    }
]

#########################################################################
#==============================
# GradientBoostingClassifier()
#==============================
# Define estimator
estimator = GradientBoostingClassifier(**rs)

# Define pipeline with steps
pipe_gbc = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_gbc = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [GradientBoostingClassifier(**rs)],
        'clf__n_estimators': [10, 100, 1000]
#        , 'clf__n_estimators': [10, 100, 200, 500, 1000]
        , 'clf__learning_rate': [0.001, 0.01, 0.1]
        , 'clf__subsample': [0.5, 0.7, 1.0]
        , 'clf__max_depth': [3, 7, 9]
    }
]

#########################################################################
#===========================
# GaussianNB()
#===========================
# Define estimator
estimator = GaussianNB()

# Define pipeline with steps
pipe_gnb = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_gnb = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [GaussianNB()],
        'clf__priors': [None]
        , 'clf__var_smoothing': np.logspace(0, -9, num = 100)
    }
]

#########################################################################
#==============================
# GaussianProcessClassifier()
#==============================
# Define estimator
estimator = GaussianProcessClassifier(**rs)

# Define pipeline with steps
pipe_gpc = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_gpc = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [GaussianProcessClassifier(**rs)],
        'clf__kernel': [1*RBF(), 1*DotProduct(), 1*Matern(), 1*RationalQuadratic(), 1*WhiteKernel()]
    }
]

#########################################################################
#===========================
# KNeighborsClassifier()
#===========================
# Define estimator
estimator = KNeighborsClassifier(**njobs)

# Define pipeline with steps
pipe_knn = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_knn = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [KNeighborsClassifier(**njobs)],
        'clf__n_neighbors': range(21, 51, 2)
#        , 'clf__n_neighbors': [5, 7, 11]
        , 'clf__metric': ['euclidean', 'manhattan', 'minkowski']
        , 'clf__weights': ['uniform', 'distance']
    }
]

#########################################################################
#===========================
# LogisticRegression()
#===========================
# Define estimator
estimator = LogisticRegression(**rs)

# Define pipeline with steps
pipe_lr = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(LogisticRegression(**rs), cv = rskf_cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)])

# Define hyperparameter space to search
param_grid_lr = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [rskf_cv]
    },
    {
#        'clf': [LogisticRegression(**rs)],
        'clf__C': np.logspace(0, 4, 10),
        'clf__penalty': ['none', 'l1', 'l2', 'elasticnet'],  # note: 'elasticnet' also requires 'clf__l1_ratio'
        'clf__max_iter': list(range(100, 800, 100)),
        'clf__solver': ['saga']
    },
    {
#        'clf': [LogisticRegression(**rs)],
        'clf__C': np.logspace(0, 4, 10),
        'clf__penalty': ['l2', 'none'],
        'clf__max_iter': list(range(100, 800, 100)),
        'clf__solver': ['newton-cg', 'lbfgs', 'sag']
    },
    {
#        'clf': [LogisticRegression(**rs)],
        'clf__C': np.logspace(0, 4, 10),
        'clf__penalty': ['l1', 'l2'],
        'clf__max_iter': list(range(100, 800, 100)),
        'clf__solver': ['liblinear']
    }
]

#########################################################################
#==================
# MLPClassifier()
#==================
# Define estimator
estimator = MLPClassifier(**rs)

# Define pipeline with steps
pipe_mlp = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_mlp = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [MLPClassifier(**rs, max_iter = 1000)],
        'clf__max_iter': [1000, 2000]
        , 'clf__hidden_layer_sizes': [(1,), (2,), (3,), (5,), (10,)]
        , 'clf__solver': ['lbfgs', 'sgd', 'adam']
        , 'clf__learning_rate': ['constant', 'invscaling', 'adaptive']
#        , 'clf__learning_rate': ['constant']
    }
]

#########################################################################
#==================================
# QuadraticDiscriminantAnalysis()
#==================================
# Define estimator
estimator = QuadraticDiscriminantAnalysis()  # QDA takes no random_state argument

# Define pipeline with steps
pipe_qda = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_qda = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [QuadraticDiscriminantAnalysis()],
        'clf__priors': [None]
    }
]

#########################################################################
#====================
# RidgeClassifier()
#====================
# Define estimator
estimator = RidgeClassifier(**rs)

# Define pipeline with steps
pipe_rc = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_rc = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [RidgeClassifier(**rs)],
        'clf__alpha': [0.1, 0.2, 0.5, 0.8, 1.0]
    }
]

#######################################################################
#===========================
# RandomForestClassifier()
#===========================
# Define estimator
estimator = RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)

# Define pipeline with steps
pipe_rf = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_rf = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)],
        'clf__max_depth': [4, 6, 8, 10, 12, 16, 20, None]
        , 'clf__class_weight': ['balanced', 'balanced_subsample']
        , 'clf__n_estimators': [10, 25, 50, 100, 200, 300]  # go up to 100
        , 'clf__criterion': ['gini', 'entropy', 'log_loss']
        , 'clf__max_features': ['sqrt', 'log2', None]  # default is sqrt
        , 'clf__min_samples_leaf': [1, 2, 3, 4, 5, 10]
        , 'clf__min_samples_split': [2, 5, 15, 20]
    }
]

#######################################################################
#========
# SVC()
#========
# Define estimator
estimator = SVC(**rs)

# Define pipeline with steps
pipe_svc = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_svc = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [SVC(**rs)],
        'clf__kernel': ['poly', 'rbf', 'sigmoid']
#        , 'clf__kernel': ['linear']
        , 'clf__C': [50, 10, 1.0, 0.1, 0.01]
        , 'clf__gamma': ['scale', 'auto']
    }
]

#######################################################################
#=================
# XGBClassifier()
#=================
# Define estimator
# https://www.datatechnotes.com/2019/07/classification-example-with.html
# XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
#               colsample_bynode=1, colsample_bytree=1, gamma=0, learning_rate=0.1,
#               max_delta_step=0, max_depth=3, min_child_weight=1, missing=None,
#               n_estimators=100, n_jobs=1, nthread=None,
#               objective='multi:softprob', random_state=0, reg_alpha=0,
#               reg_lambda=1, scale_pos_weight=1, seed=None, silent=None,
#               subsample=1, verbosity=1)
estimator = XGBClassifier(**rs, **njobs, verbose = 3)

# Define pipeline with steps
pipe_xgb = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
#    , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_xgb = [
    {
        'fs__min_features_to_select': [1, 2]
#        , 'fs__cv': [cv]
    },
    {
#        'clf': [XGBClassifier(**rs, **njobs, verbose = 3)],
        'clf__learning_rate': [0.01, 0.05, 0.1, 0.2]
        , 'clf__max_depth': [4, 6, 8, 10, 12, 16, 20]
        , 'clf__n_estimators': [10, 25, 50, 100, 200, 300]
#        , 'clf__min_samples_leaf': [4, 8, 12, 16, 20]
#        , 'clf__max_features': ['auto', 'sqrt']
    }
]

#######################################################################
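# Usage sketch: an assumption about how the calling script consumes the
# pipe_*/param_grid_* pairs above, not part of the committed file. It shows
# one pair being wired into GridSearchCV with the same MCC scoring used by
# the RFECV step; X_train and y_train are placeholders supplied by the caller.
from sklearn.model_selection import GridSearchCV

def run_grid_search(pipe, param_grid, X_train, y_train):
    # Exhaustive search over the given pipeline and grid, scored with MCC
    gscv = GridSearchCV(pipe
                        , param_grid
                        , cv = cv
                        , scoring = 'matthews_corrcoef'
                        , **njobs)
    gscv.fit(X_train, y_train)
    return gscv

# Example call (placeholders):
# best_rf = run_grid_search(pipe_rf, param_grid_rf, X_train, y_train)
# print(best_rf.best_params_, best_rf.best_score_)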