diff --git a/UQ_FS_fn_CALL.py b/UQ_FS_fn_CALL.py
index 7fcf59b..e0f8b62 100644
--- a/UQ_FS_fn_CALL.py
+++ b/UQ_FS_fn_CALL.py
@@ -25,8 +25,16 @@ ds_lrD = fsgs(input_df = X
                , estimator = LogisticRegression(**rs)
                , var_type = 'mixed')
 
+# RF: all features (no feature selection), with hyperparameter tuning
+rf_allF = fsgs(input_df = X
+               , target = y
+               , param_gridLd = param_grid_rf
+               , blind_test_df = X_bts
+               , blind_test_target = y_bts
+               , estimator = RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)
+               , var_type = 'mixed')
-
+# Fitting 10 folds for each of 31104 candidates, totalling 311040 fits
@@ -44,4 +52,5 @@ ds_lrD = fsgs(input_df = X
 # file = 'LR_FS.json'
 # with open(file, 'r') as f:
 #     data = json.load(f)
-##############################################################################
\ No newline at end of file
+##############################################################################
+
diff --git a/UQ_FS_mixed_eg.py b/UQ_FS_mixed_eg.py
index 668f14d..358cd4a 100644
--- a/UQ_FS_mixed_eg.py
+++ b/UQ_FS_mixed_eg.py
@@ -37,6 +37,7 @@ col_transform = ColumnTransformer(transformers = t
 # print(col_transform.get_feature_names_out())
 
 # foo = col_transform.fit_transform(X)
+Xm = col_transform.fit_transform(X)
 # (foo == test).all()
 #-----------------------
diff --git a/UQ_pnca_ML.py b/UQ_pnca_ML.py
index d86c853..d0a4099 100644
--- a/UQ_pnca_ML.py
+++ b/UQ_pnca_ML.py
@@ -49,7 +49,6 @@ from sklearn.compose import make_column_transformer
 from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score
 from sklearn.metrics import roc_auc_score, roc_curve, f1_score, matthews_corrcoef, jaccard_score
 from sklearn.metrics import jaccard_score
-
 from sklearn.metrics import make_scorer
 from sklearn.metrics import classification_report
 
@@ -62,8 +61,8 @@ from sklearn.model_selection import StratifiedKFold
 from sklearn.pipeline import Pipeline
 from sklearn.pipeline import make_pipeline
 
-from sklearn.feature_selection import RFE
-from sklearn.feature_selection import RFECV
+#from sklearn.feature_selection import RFE
+#from sklearn.feature_selection import RFECV
 
 import itertools
 #import seaborn as sns
 import matplotlib.pyplot as plt
diff --git a/classification_params.py b/classification_params.py
deleted file mode 100644
index 0991e25..0000000
--- a/classification_params.py
+++ /dev/null
@@ -1,479 +0,0 @@
-########################################################################
-#======================
-# AdaBoostClassifier()
-#======================
-estimator = AdaBoostClassifier(**rs)
-
-# Define pipleline with steps
-pipe_abc = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('clf', AdaBoostClassifier(**rs))])
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_abc = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [AdaBoostClassifier(**rs)],
-        'clf__n_estimators': [1, 2, 5, 10]
-#       , 'clf__base_estimator' : ['SVC']
-#       , 'clf__splitter' : ["best", "random"]
-    }
-]
-########################################################################
-#======================
-# BaggingClassifier()
-#======================
-estimator = BaggingClassifier(**rs
-                              , **njobs
-                              , bootstrap = True
-                              , oob_score = True)
-
-# Define pipleline with steps
-pipe_bc = Pipeline([
-
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_bc = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [BaggingClassifier(**rs, **njobs , bootstrap = True, oob_score = True)],
-        'clf__n_estimators' : [10, 25, 50, 100, 150, 200, 500, 700, 1000]
-#       , 'clf__base_estimator' : ['None', 'SVC()', 'KNeighborsClassifier()'] # if none, DT is used
-    }
-]
-########################################################################
-#======================
-# BernoulliNB ()
-#======================
-# Define estimator
-estimator = BernoulliNB()
-
-# Define pipleline with steps
-pipe_bnb = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_bnb = [
-    {'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [BernoulliNB()],
-        'clf__alpha': [1, 0]
-        , 'clf__binarize':[None, 0]
-        , 'clf__fit_prior': [True]
-        , 'clf__class_prior': [None]
-    }
-]
-########################################################################
-#===========================
-# DecisionTreeClassifier()
-#===========================
-
-# Define estimator
-estimator = DecisionTreeClassifier(**rs)
-
-# Define pipleline with steps
-pipe_dt = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_dt = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [DecisionTreeClassifier(**rs)],
-        'clf__max_depth': [None, 2, 4, 6, 8, 10, 12, 16, 20]
-        , 'clf__class_weight':['balanced']
-        , 'clf__criterion': ['gini', 'entropy', 'log_loss']
-        , 'clf__max_features': [None, 'sqrt', 'log2']
-        , 'clf__min_samples_leaf': [1, 2, 3, 4, 5, 10]
-        , 'clf__min_samples_split': [2, 5, 15, 20]
-    }
-]
-
-#########################################################################
-#==============================
-# GradientBoostingClassifier()
-#==============================
-# Define estimator
-estimator = GradientBoostingClassifier(**rs)
-
-# Define pipleline with steps
-pipe_gbc = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_gbc = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-    {
-#       'clf': [GradientBoostingClassifier(**rs)],
-        'clf__n_estimators' : [10, 100, 200, 500, 1000]
-        , 'clf__learning_rate': [0.001, 0.01, 0.1]
-        , 'clf__subsample' : [0.5, 0.7, 1.0]
-        , 'clf__max_depth' : [3, 7, 9]
-
-    }
-]
-
-#########################################################################
-#===========================
-# GaussianNB ()
-#===========================
-# Define estimator
-estimator = GaussianNB()
-
-# Define pipleline with steps
-pipe_gnb = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_gnb = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-    {
-#       'clf': [GaussianNB()],
-        'clf__priors': [None]
-        , 'clf__var_smoothing': np.logspace(0,-9, num=100)
-    }
-]
-
-#########################################################################
-#===========================
-# GaussianProcessClassifier()
-#===========================
-# Define estimator
-estimator = GaussianProcessClassifier(**rs)
-
-# Define pipleline with steps
-pipe_gbc = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_gbc = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [GaussianProcessClassifier(**rs)],
-        'clf__kernel': [1*RBF(), 1*DotProduct(), 1*Matern(), 1*RationalQuadratic(), 1*WhiteKernel()]
-    }
-]
-
-#########################################################################
-#===========================
-# KNeighborsClassifier ()
-#===========================
-# Define estimator
-estimator = KNeighborsClassifier(**njobs)
-
-# Define pipleline with steps
-pipe_knn = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_knn = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [KNeighborsClassifier(**njobs)],
-        'clf__n_neighbors': range(21, 51, 2)
-        #, 'clf__n_neighbors': [5, 7, 11]
-        , 'clf__metric' : ['euclidean', 'manhattan', 'minkowski']
-        , 'clf__weights' : ['uniform', 'distance']
-
-    }
-]
-#########################################################################
-#===========================
-# LogisticRegression ()
-#===========================
-# Define estimator
-estimator = LogisticRegression(**rs)
-
-# Define pipleline with steps
-pipe_lr = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(LogisticRegression(**rs), cv = rskf_cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)])
-
-# Define hyperparmeter space to search for
-param_grid_lr = [
-
-    {'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [rskf_cv]
-    },
-
-    {
-#       'clf': [LogisticRegression(**rs)],
-        'clf__C': np.logspace(0, 4, 10),
-        'clf__penalty': ['none', 'l1', 'l2', 'elasticnet'],
-        'clf__max_iter': list(range(100,800,100)),
-        'clf__solver': ['saga']
-    },
-    {
-#       'clf': [LogisticRegression(**rs)],
-        'clf__C': np.logspace(0, 4, 10),
-        'clf__penalty': ['l2', 'none'],
-        'clf__max_iter': list(range(100,800,100)),
-        'clf__solver': ['newton-cg', 'lbfgs', 'sag']
-    },
-    {
-#       'clf': [LogisticRegression(**rs)],
-        'clf__C': np.logspace(0, 4, 10),
-        'clf__penalty': ['l1', 'l2'],
-        'clf__max_iter': list(range(100,800,100)),
-        'clf__solver': ['liblinear']
-    }
-
-]
-#########################################################################
-#==================
-# MLPClassifier()
-#==================
-# Define estimator
-estimator = MLPClassifier(**rs)
-
-# Define pipleline with steps
-pipe_mlp = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-param_grid_mlp = [ {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [MLPClassifier(**rs, max_iter = 1000)],
-        'clf__max_iter': [1000, 2000]
-        , 'clf__hidden_layer_sizes': [(1), (2), (3), (5), (10)]
-        , 'clf__solver': ['lbfgs', 'sgd', 'adam']
-        , 'clf__learning_rate': ['constant', 'invscaling', 'adaptive']
-        #, 'clf__learning_rate': ['constant']
-
-    }
-]
-
-#########################################################################
-#==================================
-# QuadraticDiscriminantAnalysis()
-#==================================
-# Define estimator
-estimator = QuadraticDiscriminantAnalysis(**rs)
-
-# Define pipleline with steps
-pipe_qda = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_qda = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [QuadraticDiscriminantAnalysis()],
-        'clf__priors': [None]
-
-    }
-]
-
-#########################################################################
-#====================
-# RidgeClassifier()
-#====================
-
-# Define estimator
-estimator = RidgeClassifier(**rs)
-
-# Define pipleline with steps
-pipe_rc = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-param_grid_rc = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-        #'clf' : [RidgeClassifier(**rs)],
-        'clf__alpha': [0.1, 0.2, 0.5, 0.8, 1.0]
-    }
-]
-#######################################################################
-#===========================
-# RandomForestClassifier()
-#===========================
-# Define estimator
-estimator = [RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)](**rs)
-
-# Define pipleline with steps
-pipe_rf = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_rf = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)],
-        'clf__max_depth': [4, 6, 8, 10, 12, 16, 20, None]
-        , 'clf__class_weight':['balanced','balanced_subsample']
-        , 'clf__n_estimators': [10, 25, 50, 100, 200, 300] # go upto a 100
-        , 'clf__criterion': ['gini', 'entropy', 'log_loss']
-        , 'clf__max_features': ['sqrt', 'log2', None] #deafult is sqrt
-        , 'clf__min_samples_leaf': [1, 2, 3, 4, 5, 10]
-        , 'clf__min_samples_split': [2, 5, 15, 20]
-    }
-]
-#######################################################################
-#========
-# SVC()
-#========
-
-estimator = SVC(**rs)
-
-# Define pipleline with steps
-pipe_svc = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_svc = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [SVC(**rs)],
-        'clf__kernel': ['poly', 'rbf', 'sigmoid']
-        #, 'clf__kernel': ['linear']
-        , 'clf__C' : [50, 10, 1.0, 0.1, 0.01]
-        , 'clf__gamma': ['scale', 'auto']
-
-    }
-]
-
-#######################################################################
-#=================
-# XGBClassifier ()
-#=================
-
-# Define estimator
-#https://www.datatechnotes.com/2019/07/classification-example-with.html
-# XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
-#               colsample_bynode=1, colsample_bytree=1, gamma=0, learning_rate=0.1,
-#               max_delta_step=0, max_depth=3, min_child_weight=1, missing=None,
-#               n_estimators=100, n_jobs=1, nthread=None,
-#               objective='multi:softprob', random_state=0, reg_alpha=0,
-#               reg_lambda=1, scale_pos_weight=1, seed=None, silent=None,
-#               subsample=1, verbosity=1)
-estimator = XGBClassifier(**rs, **njobs, verbose = 3)
-
-# Define pipleline with steps
-pipe_xgb = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-param_grid_xgb = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-    {
-#       'clf': [XGBClassifier(**rs , **njobs, verbose = 3)],
-        'clf__learning_rate': [0.01, 0.05, 0.1, 0.2]
-        , 'clf__max_depth' : [4, 6, 8, 10, 12, 16, 20]
-        , 'clf__n_estimators': [10, 25, 50, 100, 200, 300]
-        #, 'clf__min_samples_leaf': [4, 8, 12, 16, 20]
-        #, 'clf__max_features': ['auto', 'sqrt']
-    }
-]
-
-#######################################################################
-
diff --git a/classification_params_FS.py b/classification_params_FS.py
index 0991e25..2e6e450 100644
--- a/classification_params_FS.py
+++ b/classification_params_FS.py
@@ -1,6 +1,31 @@
+# Date: 25/05/2020
+# https://scikit-learn.org/stable/supervised_learning.html
+
+
+# try features from Autosklearn:
+# autosklearn --> pipeline --> components --> classification
+# https://github.com/automl/auto-sklearn/tree/master/autosklearn/pipeline/components/classification
+
+# TOADD:
+# Extra Trees
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/extra_trees.py
+# LDA
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/lda.py
+# Multinomial_nb
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/multinomial_nb.py
+# passive_aggressive
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/passive_aggressive.py
+# SGD
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/sgd.py
+
+
+###### https://scikit-learn.org/stable/supervised_learning.html
+
 ########################################################################
 #======================
 # AdaBoostClassifier()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/adaboost.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.AdaBoostClassifier.html
 #======================
 estimator = AdaBoostClassifier(**rs)
 
@@ -22,14 +47,162 @@ param_grid_abc = [
 
     {
 #       'clf': [AdaBoostClassifier(**rs)],
-        'clf__n_estimators': [1, 2, 5, 10]
-#       , 'clf__base_estimator' : ['SVC']
-#       , 'clf__splitter' : ["best", "random"]
+#       'clf__n_estimators': [50, 100, 150, 200, 250, 300, 350, 400, 450, 500]
+        'clf__n_estimators': [50, 100, 200, 300, 400, 500],
+        'clf__learning_rate': [0.01, 0.1, 1, 1.5, 2],
+        'clf__base_estimator__max_depth': [1, 5, 10],  # AdaBoostClassifier itself has no max_depth; this tunes its DecisionTree base estimator
+#       'clf__base_estimator' : ['SVC']
     }
 ]
+#======================
+# ExtraTreesClassifier()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/extra_trees.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.ExtraTreesClassifier.html
+#======================
+estimator = ExtraTreesClassifier(**rs)
+
+# Define pipeline with steps
+pipe_xtr = Pipeline([
+    ('pre', MinMaxScaler())
+    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
+#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
+#   , ('clf', ExtraTreesClassifier(**rs))])
+    , ('clf', estimator)
+    ])
+
+# Define hyperparameter space to search for
+param_grid_xtr = [
+    {
+        'fs__min_features_to_select' : [1,2]
+#       , 'fs__cv': [cv]
+    },
+
+    {
+#       'clf': [ExtraTreesClassifier(**rs)],
+        'clf__n_estimators': [100, 300, 500],  # autosklearn does not tune this
+        'clf__max_depth': [None],
+        'clf__criterion': ['gini', 'entropy'],
+        'clf__max_features': [None, 'sqrt', 'log2', 0.5, 1],
+        'clf__min_samples_leaf': [1, 5, 10, 15, 20],
+        'clf__min_samples_split': [2, 5, 10, 15, 20]
+    }
+]
+
+#===========================
+# DecisionTreeClassifier()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/decision_tree.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.tree.DecisionTreeClassifier.html
+#===========================
+# Define estimator
+estimator = DecisionTreeClassifier(**rs)
+
+# Define pipeline with steps
+pipe_dt = Pipeline([
+    ('pre', MinMaxScaler())
+    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
+#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
+    , ('clf', estimator)
+    ])
+
+# Define hyperparameter space to search for
+param_grid_dt = [
+    {
+        'fs__min_features_to_select' : [1,2]
+#       , 'fs__cv': [cv]
+    },
+
+    {
+#       'clf': [DecisionTreeClassifier(**rs)],
+        'clf__max_depth': [None, 2, 6, 10, 14, 16, 20],
+#       'clf__max_depth': [None, 0, 0.2, 0.5],  # autosklearn tunes a max_depth_factor; sklearn itself needs int depths or None
+        'clf__class_weight': [None, 'balanced'],
+        'clf__criterion': ['gini', 'entropy'],
+        'clf__max_features': [None, 'sqrt', 'log2', 1],
+        'clf__min_samples_leaf': [1, 5, 10, 15, 20],
+        'clf__min_samples_split': [2, 5, 10, 15, 20]
+    }
+]
+
+########################################################################
+#===========================
+# RandomForestClassifier()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/random_forest.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestClassifier.html
+#===========================
+
+# Define estimator
+estimator = RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)
+
+# Define pipeline with steps
+pipe_rf = Pipeline([
+    ('pre', MinMaxScaler())
+    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
+#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
+    , ('clf', estimator)
+    ])
+
+# Define hyperparameter space to search for
+param_grid_rf = [
+    {
+        'fs__min_features_to_select' : [1,2]
+#       , 'fs__cv': [cv]
+    },
+
+    {
+#       'clf': [RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)],
+#       'clf__max_depth': [4, 6, 8, 10, 12, 16, 20, None]
+        'clf__max_depth': [None, 2, 6, 10, 14, 16, 20]  # autosklearn: None
+        , 'clf__class_weight': [None, 'balanced']
+        , 'clf__n_estimators': [50, 100, 200, 300]  # autosklearn: not tuned
+        , 'clf__criterion': ['gini', 'entropy']
+        , 'clf__max_features': ['sqrt', 'log2', None, 0.5, 1]  # must be None, a string, or > 0
+        , 'clf__min_samples_leaf': [1, 5, 10, 15, 20]
+        , 'clf__min_samples_split': [2, 5, 15, 20]
+    }
+]
+
+#=================
+# XGBClassifier ()
+#=================
+# https://www.kaggle.com/code/stuarthallows/using-xgboost-with-scikit-learn/notebook
+# Define estimator
+# https://www.datatechnotes.com/2019/07/classification-example-with.html
+# XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
+#               colsample_bynode=1, colsample_bytree=1, gamma=0, learning_rate=0.1,
+#               max_delta_step=0, max_depth=3, min_child_weight=1, missing=None,
+#               n_estimators=100, n_jobs=1, nthread=None,
+#               objective='multi:softprob', random_state=0, reg_alpha=0,
+#               reg_lambda=1, scale_pos_weight=1, seed=None, silent=None,
+#               subsample=1, verbosity=1)
+estimator = XGBClassifier(**rs, **njobs, verbose = 3)
+
+# Define pipeline with steps
+pipe_xgb = Pipeline([
+    ('pre', MinMaxScaler())
+    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
+#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
+    , ('clf', estimator)
+    ])
+
+param_grid_xgb = [
+    {
+        'fs__min_features_to_select' : [1,2]
+#       , 'fs__cv': [cv]
+    },
+    {
+#       'clf': [XGBClassifier(**rs , **njobs, verbose = 3)],
+        'clf__learning_rate': [0.01, 0.05, 0.1, 0.2]
+        , 'clf__max_depth' : [3, 8, 10, 12, 16, 20]
+        , 'clf__n_estimators': [50, 100, 200, 300]
+    }
+]
+
+#######################################################################
+
+
 ########################################################################
 #======================
-# BaggingClassifier()
+# BaggingClassifier()*
+# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.BaggingClassifier.html
 #======================
 estimator = BaggingClassifier(**rs
                               , **njobs
                               , bootstrap = True
                               , oob_score = True)
@@ -53,7 +226,7 @@ param_grid_bc = [
     },
 
     {
-#       'clf': [BaggingClassifier(**rs, **njobs , bootstrap = True, oob_score = True)],
+#       'clf': [BaggingClassifier(**rs, **njobs , bootstrap = True, oob_score = True)],
        'clf__n_estimators' : [10, 25, 50, 100, 150, 200, 500, 700, 1000]
 #      , 'clf__base_estimator' : ['None', 'SVC()', 'KNeighborsClassifier()'] # if none, DT is used
     }
@@ -61,6 +234,8 @@ param_grid_bc = [
 ########################################################################
 #======================
 # BernoulliNB ()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/bernoulli_nb.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.BernoulliNB.html
 #======================
 # Define estimator
 estimator = BernoulliNB()
@@ -81,49 +256,18 @@ param_grid_bnb = [
 
     {
 #       'clf': [BernoulliNB()],
-        'clf__alpha': [1, 0]
-        , 'clf__binarize':[None, 0]
+        'clf__alpha': [0.01, 0, 1, 10, 100]
+        , 'clf__binarize': [None, 0]  # autosklearn does not tune this; None may be enough
         , 'clf__fit_prior': [True]
         , 'clf__class_prior': [None]
     }
 ]
-########################################################################
-#===========================
-# DecisionTreeClassifier()
-#===========================
-
-# Define estimator
-estimator = DecisionTreeClassifier(**rs)
-
-# Define pipleline with steps
-pipe_dt = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_dt = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [DecisionTreeClassifier(**rs)],
-        'clf__max_depth': [None, 2, 4, 6, 8, 10, 12, 16, 20]
-        , 'clf__class_weight':['balanced']
-        , 'clf__criterion': ['gini', 'entropy', 'log_loss']
-        , 'clf__max_features': [None, 'sqrt', 'log2']
-        , 'clf__min_samples_leaf': [1, 2, 3, 4, 5, 10]
-        , 'clf__min_samples_split': [2, 5, 15, 20]
-    }
-]
 #########################################################################
 #==============================
 # GradientBoostingClassifier()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/gradient_boosting.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.GradientBoostingClassifier.html
 #==============================
 # Define estimator
 estimator = GradientBoostingClassifier(**rs)
@@ -144,17 +288,26 @@ param_grid_gbc = [
     },
     {
 #       'clf': [GradientBoostingClassifier(**rs)],
-        'clf__n_estimators' : [10, 100, 200, 500, 1000]
-        , 'clf__learning_rate': [0.001, 0.01, 0.1]
-        , 'clf__subsample' : [0.5, 0.7, 1.0]
-        , 'clf__max_depth' : [3, 7, 9]
+        'clf__loss' : ['log_loss', 'exponential'],
+        'clf__n_estimators' : [10, 100, 200, 500, 1000],  # autosklearn: not there
+        'clf__learning_rate' : [0.01, 0.1, 0.5, 1],  # must be > 0
+        'clf__subsample' : [0.5, 0.7, 1.0],
+        'clf__max_depth' : [None, 3, 7, 9],  # autosklearn: None
+        'clf__min_samples_leaf' : [1, 20, 50, 100, 150, 200],
+        'clf__max_leaf_nodes' : [3, 31, 51, 331, 2047],  # autosklearn: log = T
+#       'clf__l2_regularization' : [0.0000000001, 0.000001, 0.0001, 0.01, 0.1, 1],  # HistGradientBoostingClassifier only; lower=1E-10, upper=1, log = T
+        'clf__n_iter_no_change' : [None, 1, 5, 10, 15, 20],  # autosklearn: 1, 20
+        'clf__validation_fraction' : [0.01, 0.03, 0.2, 0.3, 0.4]  # autosklearn: 0.01, 0.4
     }
 ]
 
 #########################################################################
 #===========================
-# GaussianNB ()
+# GaussianNB()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/gaussian_nb.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.GaussianNB.html
 #===========================
 # Define estimator
 estimator = GaussianNB()
@@ -183,7 +336,8 @@ param_grid_gnb = [
 
 #########################################################################
 #===========================
-# GaussianProcessClassifier()
+# GaussianProcessClassifier() *
+# https://scikit-learn.org/stable/modules/generated/sklearn.gaussian_process.GaussianProcessClassifier.html
 #===========================
 # Define estimator
 estimator = GaussianProcessClassifier(**rs)
@@ -212,6 +366,8 @@ param_grid_gbc = [
 #########################################################################
 #===========================
 # KNeighborsClassifier ()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/k_nearest_neighbors.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KNeighborsClassifier.html
 #===========================
 # Define estimator
 estimator = KNeighborsClassifier(**njobs)
@@ -233,16 +389,18 @@ param_grid_knn = [
 
     {
 #       'clf': [KNeighborsClassifier(**njobs)],
-        'clf__n_neighbors': range(21, 51, 2)
        #, 'clf__n_neighbors': [5, 7, 11]
-        , 'clf__metric' : ['euclidean', 'manhattan', 'minkowski']
-        , 'clf__weights' : ['uniform', 'distance']
+        #'clf__n_neighbors': list(range(21, 51, 4))
+        'clf__n_neighbors' : [1, 11, 21, 51, 71, 101],
+        'clf__metric' : ['euclidean', 'manhattan', 'minkowski'],
+        'clf__weights' : ['uniform', 'distance']
 
     }
 ]
 #########################################################################
 #===========================
-# LogisticRegression ()
+# LogisticRegression () *
+# https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html
 #===========================
 # Define estimator
 estimator = LogisticRegression(**rs)
@@ -287,6 +445,8 @@ param_grid_lr = [
 #########################################################################
 #==================
 # MLPClassifier()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/mlp.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.neural_network.MLPClassifier.html
 #==================
 # Define estimator
 estimator = MLPClassifier(**rs)
@@ -306,11 +466,10 @@ param_grid_mlp = [ {
 
     {
 #       'clf': [MLPClassifier(**rs, max_iter = 1000)],
-        'clf__max_iter': [1000, 2000]
-        , 'clf__hidden_layer_sizes': [(1), (2), (3), (5), (10)]
-        , 'clf__solver': ['lbfgs', 'sgd', 'adam']
-        , 'clf__learning_rate': ['constant', 'invscaling', 'adaptive']
-        #, 'clf__learning_rate': ['constant']
+        'clf__max_iter': [200, 500, 1000, 2000],  # not tuned by autosklearn
+        'clf__hidden_layer_sizes': [(100,), (1,), (2,), (3,), (5,), (10,)],  # tuples, not bare ints
+        'clf__solver': ['lbfgs', 'sgd', 'adam'],  # not tuned by autosklearn
+        'clf__learning_rate': ['constant', 'invscaling', 'adaptive']  # not tuned by autosklearn
 
     }
 ]
@@ -318,6 +477,8 @@ param_grid_mlp = [ {
 #########################################################################
 #==================================
 # QuadraticDiscriminantAnalysis()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/qda.py
+# https://scikit-learn.org/stable/modules/generated/sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis.html
 #==================================
 # Define estimator
 estimator = QuadraticDiscriminantAnalysis(**rs)
@@ -339,14 +500,15 @@ param_grid_qda = [
 
     {
 #       'clf': [QuadraticDiscriminantAnalysis()],
-        'clf__priors': [None]
-
+        'clf__priors': [None],
+        'clf__reg_param': [0, 1]
     }
 ]
 
 #########################################################################
 #====================
-# RidgeClassifier()
+# RidgeClassifier() *
+# https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.RidgeClassifier.html
 #====================
 
 # Define estimator
@@ -372,41 +534,14 @@ param_grid_rc = [
     }
 ]
 #######################################################################
-#===========================
-# RandomForestClassifier()
-#===========================
-# Define estimator
-estimator = [RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)](**rs)
-
-# Define pipleline with steps
-pipe_rf = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-# Define hyperparmeter space to search for
-param_grid_rf = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-
-    {
-#       'clf': [RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True)],
-        'clf__max_depth': [4, 6, 8, 10, 12, 16, 20, None]
-        , 'clf__class_weight':['balanced','balanced_subsample']
-        , 'clf__n_estimators': [10, 25, 50, 100, 200, 300] # go upto a 100
-        , 'clf__criterion': ['gini', 'entropy', 'log_loss']
-        , 'clf__max_features': ['sqrt', 'log2', None] #deafult is sqrt
-        , 'clf__min_samples_leaf': [1, 2, 3, 4, 5, 10]
-        , 'clf__min_samples_split': [2, 5, 15, 20]
-    }
-]
-#######################################################################
 #========
 # SVC()
+# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/libsvm_svc.py
+# paper that supports libSVM/SVC param searching
+# https://www.csie.ntu.edu.tw/~cjlin/papers/guide/guide.pdf
+# https://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html
+
+########## https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/liblinear_svc.py (NOT the one used, but they are very similar!)
 #========
 
 estimator = SVC(**rs)
@@ -428,52 +563,15 @@ param_grid_svc = [
 
     {
 #       'clf': [SVC(**rs)],
-        'clf__kernel': ['poly', 'rbf', 'sigmoid']
-        #, 'clf__kernel': ['linear']
-        , 'clf__C' : [50, 10, 1.0, 0.1, 0.01]
-        , 'clf__gamma': ['scale', 'auto']
+#       'clf__kernel': ['poly', 'rbf', 'sigmoid']
+        'clf__kernel': ['rbf']
+#       , 'clf__C' : [50, 10, 1.0, 0.1, 0.01]
+        , 'clf__C' : [1, 0.03, 10, 100, 1000, 10000, 32768]
+        , 'clf__gamma' : ['scale', 'auto']
     }
 ]
 
 #######################################################################
-#=================
-# XGBClassifier ()
-#=================
-
-# Define estimator
-#https://www.datatechnotes.com/2019/07/classification-example-with.html
-# XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
-#               colsample_bynode=1, colsample_bytree=1, gamma=0, learning_rate=0.1,
-#               max_delta_step=0, max_depth=3, min_child_weight=1, missing=None,
-#               n_estimators=100, n_jobs=1, nthread=None,
-#               objective='multi:softprob', random_state=0, reg_alpha=0,
-#               reg_lambda=1, scale_pos_weight=1, seed=None, silent=None,
-#               subsample=1, verbosity=1)
-estimator = XGBClassifier(**rs, **njobs, verbose = 3)
-
-# Define pipleline with steps
-pipe_xgb = Pipeline([
-    ('pre', MinMaxScaler())
-    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
-#   , ('fs', RFECV(estimator, cv = cv, scoring = 'matthews_corrcoef'))
-    , ('clf', estimator)
-    ])
-
-param_grid_xgb = [
-    {
-        'fs__min_features_to_select' : [1,2]
-#       , 'fs__cv': [cv]
-    },
-    {
-#       'clf': [XGBClassifier(**rs , **njobs, verbose = 3)],
-        'clf__learning_rate': [0.01, 0.05, 0.1, 0.2]
-        , 'clf__max_depth' : [4, 6, 8, 10, 12, 16, 20]
-        , 'clf__n_estimators': [10, 25, 50, 100, 200, 300]
-        #, 'clf__min_samples_leaf': [4, 8, 12, 16, 20]
-        #, 'clf__max_features': ['auto', 'sqrt']
-    }
-]
-
-#######################################################################
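
Usage sketch (illustrative, not part of the diff): how a pipe_*/param_grid_* pair from classification_params_FS.py is typically wired into a grid search, and where a "Fitting 10 folds for each of N candidates" message like the one noted in UQ_FS_fn_CALL.py comes from. The toy data, the rs/njobs dicts, and the CV splitter below are assumptions made so the snippet runs standalone; the real scripts define their own and wrap this logic in fsgs().

# minimal, self-contained GridSearchCV wiring (assumed setup, not the repo's fsgs)
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.feature_selection import RFECV
from sklearn.model_selection import GridSearchCV, ParameterGrid, StratifiedKFold
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import MinMaxScaler
from sklearn.tree import DecisionTreeClassifier

rs = {'random_state': 42}        # assumed to mirror **rs in the scripts
njobs = {'n_jobs': -1}           # assumed to mirror **njobs in the scripts
cv = StratifiedKFold(n_splits = 10, shuffle = True, **rs)
X, y = make_classification(n_samples = 200, n_features = 10, **rs)

# Same step names as the grids above: 'pre' scales, 'fs' selects, 'clf' fits.
pipe_rf = Pipeline([
    ('pre', MinMaxScaler())
    , ('fs', RFECV(DecisionTreeClassifier(**rs), cv = cv, scoring = 'matthews_corrcoef'))
    , ('clf', RandomForestClassifier(**rs, **njobs, bootstrap = True, oob_score = True))
    ])

# The 'fs__'/'clf__' prefixes route each entry to the matching pipeline step.
# A list of dicts is a union of separate grids: the fs setting and the clf
# settings are searched independently, exactly as in the grids above.
param_grid_rf = [
    {'fs__min_features_to_select': [1, 2]},
    {'clf__n_estimators': [50, 100], 'clf__max_depth': [None, 10]}
]

print(len(ParameterGrid(param_grid_rf)))   # 2 + (2 x 2) = 6 candidates
gscv = GridSearchCV(pipe_rf, param_grid_rf, cv = cv,
                    scoring = 'matthews_corrcoef', verbose = 1, **njobs)
gscv.fit(X, y)                              # "Fitting 10 folds for each of 6 candidates, totalling 60 fits"
print(gscv.best_params_, round(gscv.best_score_, 3))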