# Date: 25/05/2020
# https://scikit-learn.org/stable/supervised_learning.html
# try features from Autosklearn:
# autosklearn --> pipeline --> components --> classification
# https://github.com/automl/auto-sklearn/tree/master/autosklearn/pipeline/components/classification
# TOADD:
# LDA                 https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/lda.py
# Multinomial_nb      https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/multinomial_nb.py
# passive_aggressive  https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/passive_aggressive.py
# SGD                 https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/sgd.py
###### https://scikit-learn.org/stable/supervised_learning.html
########################################################################
# NOTE(review): every param_grid below is a *list* of dicts.  GridSearchCV
# treats each dict in the list as an independent grid, so the 'fs__*'
# settings are searched with the default clf settings and vice versa.  If
# the feature-selection and classifier parameters are meant to be searched
# jointly, each list should be collapsed into a single dict -- confirm.

#======================
# AdaBoostClassifier()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/adaboost.py
# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.AdaBoostClassifier.html
#======================
estimator = AdaBoostClassifier(**rs)

# Define pipeline with steps
pipe_abc = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_abc = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf__n_estimators': [50, 100, 150, 200, 250, 300, 350, 400, 450, 500]
        'clf__n_estimators': [50, 100, 200, 300, 400, 500],
        'clf__learning_rate': [0.01, 0.1, 1, 1.5, 2],
        # FIX: AdaBoostClassifier itself has no 'max_depth' parameter; depth
        # belongs to its base estimator (a depth-1 DecisionTree by default),
        # so it must be addressed through the nested parameter name.
        'clf__base_estimator__max_depth': [1, 5, 10],
        # 'clf__base_estimator': ['SVC']
    }
]

#========================
# ExtraTreesClassifier()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/extra_trees.py
# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.ExtraTreesClassifier.html
#========================
# FIX: 'ExtraTreesClassifier**rs)' was a syntax error (missing open paren).
estimator = ExtraTreesClassifier(**rs)

# Define pipeline with steps
# FIX: renamed pipe_abc/param_grid_abc -> pipe_et/param_grid_et; the old
# names silently clobbered the AdaBoost pipeline defined just above.
pipe_et = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_et = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {   # FIX: this dict's opening brace was missing in the original.
        # 'clf': [ExtraTreesClassifier(**rs)],
        'clf__n_estimators': [100, 300, 500],  # sklearn has no tuning
        'clf__max_depth': [None],
        'clf__criterion': ['gini', 'entropy'],
        'clf__max_features': [None, 'sqrt', 'log2', 0.5, 1],
        'clf__min_samples_leaf': [1, 5, 10, 15, 20],
        'clf__min_samples_split': [2, 5, 10, 15, 20]
    }
]

#===========================
# DecisionTreeClassifier()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/decision_tree.py
# https://scikit-learn.org/stable/modules/generated/sklearn.tree.DecisionTreeClassifier.html
#===========================
# Define estimator
estimator = DecisionTreeClassifier(**rs)

# Define pipeline with steps
pipe_dt = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_dt = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [DecisionTreeClassifier(**rs)],
        # FIX: max_depth must be a positive int or None; 0, 0.2 and 0.5 raise
        # at fit time -- restored the commented-out integer candidate list.
        'clf__max_depth': [None, 2, 6, 10, 14, 16, 20],
        'clf__class_weight': [None, 'balanced'],
        'clf__criterion': ['gini', 'entropy'],
        'clf__max_features': [None, 'sqrt', 'log2', 1],
        'clf__min_samples_leaf': [1, 5, 10, 15, 20],
        'clf__min_samples_split': [2, 5, 10, 15, 20]
    }
]

########################################################################
#===========================
# RandomForestClassifier()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/random_forest.py
# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestClassifier.html
#===========================
# Define estimator
# FIX: the original wrapped the classifier in a list and then "called" the
# list with (**rs) -- a TypeError; construct the estimator directly.
estimator = RandomForestClassifier(**rs, **njobs, bootstrap=True, oob_score=True)

# Define pipeline with steps
pipe_rf = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_rf = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [RandomForestClassifier(**rs, **njobs, bootstrap=True, oob_score=True)],
        # 'clf__max_depth': [4, 6, 8, 10, 12, 16, 20, None]
        'clf__max_depth': [None, 2, 6, 10, 14, 16, 20],  # autosk: None
        'clf__class_weight': [None, 'balanced'],
        'clf__n_estimators': [50, 100, 200, 300],  # autosk: no
        'clf__criterion': ['gini', 'entropy'],
        # FIX: max_features = 0 is invalid (at least one feature must be
        # considered per split); dropped the 0 candidate.
        'clf__max_features': ['sqrt', 'log2', None, 0.5, 1],
        'clf__min_samples_leaf': [1, 5, 10, 15, 20],
        'clf__min_samples_split': [2, 5, 15, 20]
    }
]

#=================
# XGBClassifier()
#=================
# https://www.kaggle.com/code/stuarthallows/using-xgboost-with-scikit-learn/notebook
# https://www.datatechnotes.com/2019/07/classification-example-with.html
# Default construction for reference:
# XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
#               colsample_bynode=1, colsample_bytree=1, gamma=0, learning_rate=0.1,
#               max_delta_step=0, max_depth=3, min_child_weight=1, missing=None,
#               n_estimators=100, n_jobs=1, nthread=None,
#               objective='multi:softprob', random_state=0, reg_alpha=0,
#               reg_lambda=1, scale_pos_weight=1, seed=None, silent=None,
#               subsample=1, verbosity=1)
# Define estimator
# FIX: XGBClassifier's constructor flag is 'verbosity' (0-3), not 'verbose';
# 'verbose' would be forwarded as an unknown booster kwarg.
estimator = XGBClassifier(**rs, **njobs, verbosity=3)

# Define pipeline with steps
pipe_xgb = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_xgb = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [XGBClassifier(**rs, **njobs, verbosity=3)],
        'clf__learning_rate': [0.01, 0.05, 0.1, 0.2],
        'clf__max_depth': [3, 8, 10, 12, 16, 20],
        'clf__n_estimators': [50, 100, 200, 300]
    }
]

#######################################################################
########################################################################
#======================
# BaggingClassifier()*
# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.BaggingClassifier.html
#======================
estimator = BaggingClassifier(**rs, **njobs, bootstrap=True, oob_score=True)

# Define pipeline with steps
pipe_bc = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_bc = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [BaggingClassifier(**rs, **njobs, bootstrap=True, oob_score=True)],
        'clf__n_estimators': [10, 25, 50, 100, 150, 200, 500, 700, 1000]
        # , 'clf__base_estimator': ['None', 'SVC()', 'KNeighborsClassifier()']  # if none, DT is used
    }
]

########################################################################
#===============
# BernoulliNB()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/bernoulli_nb.py
# https://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.BernoulliNB.html
#===============
# Define estimator
estimator = BernoulliNB()

# Define pipeline with steps
pipe_bnb = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_bnb = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [BernoulliNB()],
        'clf__alpha': [0.01, 0, 1, 10, 100],
        'clf__binarize': [None, 0],  # autosk has no, maybe just use None
        'clf__fit_prior': [True],
        'clf__class_prior': [None]
    }
]

#########################################################################
#==============================
# GradientBoostingClassifier()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/gradient_boosting.py
# https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.GradientBoostingClassifier.html
#==============================
# Define estimator
estimator = GradientBoostingClassifier(**rs)

# Define pipeline with steps
pipe_gbc = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_gbc = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [GradientBoostingClassifier(**rs)],
        'clf__loss': ['log_loss', 'exponential'],
        'clf__n_estimators': [10, 100, 200, 500, 1000],  # autosklearn: not there
        # FIX: learning_rate must be > 0; dropped the invalid 0 candidate.
        'clf__learning_rate': [0.01, 0.1, 0.5, 1],
        'clf__subsample': [0.5, 0.7, 1.0],
        # FIX: 'clf__max_depth' appeared twice in this dict; the later [None]
        # silently overwrote [3, 7, 9].  Merged into one candidate list.
        'clf__max_depth': [None, 3, 7, 9],
        'clf__min_samples_leaf': [1, 20, 50, 100, 150, 200],
        # FIX: a comma was missing after this entry in the original.
        'clf__max_leaf_nodes': [3, 31, 51, 331, 2047],  # autosklearn: log = T
        # FIX: 'l2_regularization' belongs to HistGradientBoostingClassifier,
        # not GradientBoostingClassifier -- kept for reference only.
        # 'clf__l2_regularization': [1e-10, 1e-6, 1e-4, 0.01, 0.1, 1],  # lower=1E-10, upper=1, log=T
        # FIX: the next two keys were missing the 'clf__' step prefix, so
        # GridSearchCV would raise "Invalid parameter".
        'clf__n_iter_no_change': [None, 1, 5, 10, 15, 20],  # autsk: 1, 20
        'clf__validation_fraction': [0.01, 0.03, 0.2, 0.3, 0.4]  # autosk: 0.01, 0.4
    }
]

#########################################################################
#==============
# GaussianNB()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/gaussian_nb.py
# https://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.GaussianNB.html
#==============
# Define estimator
estimator = GaussianNB()

# Define pipeline with steps
pipe_gnb = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_gnb = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [GaussianNB()],
        'clf__priors': [None],
        'clf__var_smoothing': np.logspace(0, -9, num=100)
    }
]

#########################################################################
#==============================
# GaussianProcessClassifier() *
# https://scikit-learn.org/stable/modules/generated/sklearn.gaussian_process.GaussianProcessClassifier.html
#==============================
# Define estimator
estimator = GaussianProcessClassifier(**rs)

# Define pipeline with steps
# FIX: renamed pipe_gbc/param_grid_gbc -> pipe_gpc/param_grid_gpc; the old
# names silently clobbered the GradientBoosting pipeline defined above.
pipe_gpc = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_gpc = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [GaussianProcessClassifier(**rs)],
        'clf__kernel': [1 * RBF(), 1 * DotProduct(), 1 * Matern(),
                        1 * RationalQuadratic(), 1 * WhiteKernel()]
    }
]

#########################################################################
#===========================
# KNeighborsClassifier()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/k_nearest_neighbors.py
# https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KNeighborsClassifier.html
#===========================
# Define estimator
estimator = KNeighborsClassifier(**njobs)

# Define pipeline with steps
pipe_knn = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_knn = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [KNeighborsClassifier(**njobs)],
        # 'clf__n_neighbors': [5, 7, 11]
        # 'clf__n_neighbors': list(range(21, 51, 4))
        'clf__n_neighbors': [1, 11, 21, 51, 71, 101],
        'clf__metric': ['euclidean', 'manhattan', 'minkowski'],
        'clf__weights': ['uniform', 'distance']
    }
]

#########################################################################
#======================
# LogisticRegression() *
# https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html
#======================
# Define estimator
estimator = LogisticRegression(**rs)

# Define pipeline with steps
# NOTE(review): this section uses LogisticRegression + rskf_cv inside RFECV
# while every other section uses DecisionTreeClassifier + cv -- confirm the
# difference is intentional.
pipe_lr = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(LogisticRegression(**rs), cv=rskf_cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search.  Penalties are grouped per solver
# because each solver only supports a subset of them.
param_grid_lr = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [rskf_cv]
    },
    {   # saga supports every penalty
        'clf__C': np.logspace(0, 4, 10),
        'clf__penalty': ['none', 'l1', 'l2', 'elasticnet'],
        'clf__max_iter': list(range(100, 800, 100)),
        'clf__solver': ['saga']
    },
    {   # these solvers only handle l2 / no penalty
        'clf__C': np.logspace(0, 4, 10),
        'clf__penalty': ['l2', 'none'],
        'clf__max_iter': list(range(100, 800, 100)),
        'clf__solver': ['newton-cg', 'lbfgs', 'sag']
    },
    {   # liblinear only handles l1 / l2
        'clf__C': np.logspace(0, 4, 10),
        'clf__penalty': ['l1', 'l2'],
        'clf__max_iter': list(range(100, 800, 100)),
        'clf__solver': ['liblinear']
    }
]

#########################################################################
#==================
# MLPClassifier()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/mlp.py
# https://scikit-learn.org/stable/modules/generated/sklearn.neural_network.MLPClassifier.html
#==================
# Define estimator
estimator = MLPClassifier(**rs)

# Define pipeline with steps
pipe_mlp = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_mlp = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [MLPClassifier(**rs, max_iter=1000)],
        'clf__max_iter': [200, 500, 1000, 2000],  # no autosklearn
        # FIX: '(100)' etc. are plain ints, not tuples; written as 1-tuples
        # for clarity (MLPClassifier wraps scalars itself, so behavior is
        # unchanged: each candidate is a single hidden layer of that width).
        'clf__hidden_layer_sizes': [(100,), (1,), (2,), (3,), (5,), (10,)],  # no autosklearn
        'clf__solver': ['lbfgs', 'sgd', 'adam'],  # no autosklearn
        'clf__learning_rate': ['constant', 'invscaling', 'adaptive']  # no autosklearn
    }
]

#########################################################################
#==================================
# QuadraticDiscriminantAnalysis()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/qda.py
# https://scikit-learn.org/stable/modules/generated/sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis.html
#==================================
# Define estimator
# FIX: QuadraticDiscriminantAnalysis takes no 'random_state', so passing
# **rs raised a TypeError; the estimator is deterministic and needs no seed.
estimator = QuadraticDiscriminantAnalysis()

# Define pipeline with steps
pipe_qda = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_qda = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [QuadraticDiscriminantAnalysis()],
        'clf__priors': [None],
        'clf__reg_param': [0, 1]
    }
]

#########################################################################
#====================
# RidgeClassifier() *
# https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.RidgeClassifier.html
#====================
# Define estimator
estimator = RidgeClassifier(**rs)

# Define pipeline with steps
pipe_rc = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_rc = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [RidgeClassifier(**rs)],
        'clf__alpha': [0.1, 0.2, 0.5, 0.8, 1.0]
    }
]

#######################################################################
#========
# SVC()
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/libsvm_svc.py
# paper that supports libSVM/SVC param searching
# https://www.csie.ntu.edu.tw/~cjlin/papers/guide/guide.pdf
# https://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/liblinear_svc.py
# (NOT the one used, but they are very similar!)
#========
estimator = SVC(**rs)

# Define pipeline with steps
pipe_svc = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_svc = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [SVC(**rs)],
        # 'clf__kernel': ['poly', 'rbf', 'sigmoid'],
        'clf__kernel': ['rbf'],
        # 'clf__C': [50, 10, 1.0, 0.1, 0.01],
        'clf__C': [1, 0.03, 10, 100, 1000, 10000, 32768],
        'clf__gamma': ['scale', 'auto']
    }
]

#######################################################################
#######################################################################
#========
# LDA
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/lda.py
# https://scikit-learn.org/stable/modules/generated/sklearn.discriminant_analysis.LinearDiscriminantAnalysis.html
#========
estimator = LinearDiscriminantAnalysis()

# Define pipeline with steps
pipe_lda = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search.
# FIX: shrinkage is only supported by the 'lsqr' and 'eigen' solvers;
# combining solver='svd' with a non-None shrinkage raises at fit time,
# so the grid is split into the valid solver/shrinkage combinations.
param_grid_lda = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        'clf__solver': ['svd'],
        'clf__shrinkage': [None]
    },
    {
        'clf__solver': ['lsqr', 'eigen'],
        'clf__shrinkage': [None, 'auto', 0, 0.5, 1]
    }
]

#######################################################################
#========
# Multinomial_nb
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/multinomial_nb.py
# https://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.MultinomialNB.html
#========
estimator = MultinomialNB()

# Define pipeline with steps
pipe_mnb = Pipeline([
    ('pre', MinMaxScaler()),
    # FIX: every other section seeds the RFECV tree with **rs; this one
    # omitted it, making the feature-selection step non-reproducible.
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_mnb = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [MultinomialNB()],
        'clf__alpha': [0.01, 0.1, 1, 20, 25, 50, 55, 100]
    }
]

#######################################################################
#========
# passive_aggressive
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/passive_aggressive.py
# https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.PassiveAggressiveClassifier.html
#========
estimator = PassiveAggressiveClassifier(**rs, **njobs)

# Define pipeline with steps
pipe_pa = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_pa = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [PassiveAggressiveClassifier(**rs, **njobs)],
        'clf__C': [1, 0.03, 10, 100, 1000, 10000, 32768],
        # FIX: typo 'clf__maxt_iter' -> 'clf__max_iter'.
        'clf__max_iter': [1000, 500, 200, 100, 50, 10, 1],
        'clf__loss': ['hinge', 'squared_hinge'],
        # FIX: typo 'clf_tol' (single underscore) -> 'clf__tol'.
        'clf__tol': [1e-4, 1e-5, 1e-2, 1e-1]
    }
]

#######################################################################
#========
# SGD
# https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/sgd.py
# https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.SGDClassifier.html
#========
estimator = SGDClassifier(**rs, **njobs)

# Define pipeline with steps
pipe_sgd = Pipeline([
    ('pre', MinMaxScaler()),
    ('fs', RFECV(DecisionTreeClassifier(**rs), cv=cv, scoring='matthews_corrcoef')),
    # ('fs', RFECV(estimator, cv=cv, scoring='matthews_corrcoef')),
    ('clf', estimator)
])

# Define hyperparameter space to search
param_grid_sgd = [
    {
        'fs__min_features_to_select': [1, 2]
        # , 'fs__cv': [cv]
    },
    {
        # 'clf': [SGDClassifier(**rs, **njobs)],
        # FIX: "'clf__loss': = [...]" was a syntax error (stray '=').
        'clf__loss': ['hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron'],
        'clf__penalty': ['l1', 'l2', 'elasticnet'],
        'clf__alpha': [0.0000001, 0.00001, 0.0001, 0.01, 0.1, 1, 10, 100],  # autosklearn: 1e-7, 1e-1, log=True, default_value=0.0001
        'clf__learning_rate': ['constant', 'optimal', 'invscaling', 'adaptive'],
        'clf__eta0': [0.0000001, 0.00001, 0.0001, 0.01]  # autosklearn: 1e-7, 1e-1, default_value=0.01, log=True
    }
]