Added scripts for FS, including test call, etc.
This commit is contained in:
parent 8fe0048328
commit 5dea35f97c
3 changed files with 575 additions and 0 deletions
65 scripts/ml/run_FS.py (Executable file)
@@ -0,0 +1,65 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 24 08:11:05 2022

@author: tanu
"""
###############################################################################
#====================
# single model CALL
#====================
a_fs0 = fsgs(input_df = X
             , target = y
             , param_gridLd = [{'fs__min_features_to_select' : [1]}]
             , blind_test_df = X_bts
             , blind_test_target = y_bts
             , estimator = LogisticRegression(**rs)
             , use_fs = False # uses estimator as the RFECV estimator for fs; set to True to use custom_fs as shown below
             , custom_fs = RFECV(DecisionTreeClassifier(**rs), cv = skf_cv, scoring = 'matthews_corrcoef')
             , cv_method = skf_cv
             , var_type = 'mixed'
             )
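# A minimal sketch (kept commented out) of the alternative call hinted at in the
# use_fs comment above: with use_fs = True, the supplied custom_fs (an RFECV
# wrapping a DecisionTreeClassifier) is assumed to drive feature selection
# instead of wrapping `estimator` in RFECV. Adjust if fsgs() behaves differently.
# a_fs1 = fsgs(input_df = X
#              , target = y
#              , param_gridLd = [{'fs__min_features_to_select' : [1]}]
#              , blind_test_df = X_bts
#              , blind_test_target = y_bts
#              , estimator = LogisticRegression(**rs)
#              , use_fs = True # use custom_fs below for feature selection
#              , custom_fs = RFECV(DecisionTreeClassifier(**rs), cv = skf_cv, scoring = 'matthews_corrcoef')
#              , cv_method = skf_cv
#              , var_type = 'mixed'
#              )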

##############################################################################
#%% json output
#========================================
# Write final output file
# https://stackoverflow.com/questions/19201290/how-to-save-a-dictionary-to-a-file
#========================================
# # output final dict as a json
# outFile = 'LR_FS.json'
# with open(outFile, 'w') as f:
#     f.write(json.dumps(output_modelD, cls = NpEncoder))

# # read json
# file = 'LR_FS.json'
# with open(file, 'r') as f:
#     data = json.load(f)
##############################################################################
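# NpEncoder is assumed to be defined elsewhere in the repo; a typical
# implementation (following the StackOverflow pattern linked above) that makes
# numpy types JSON-serialisable would look roughly like:
# import json
# import numpy as np
# class NpEncoder(json.JSONEncoder):
#     def default(self, obj):
#         if isinstance(obj, np.integer):
#             return int(obj)
#         if isinstance(obj, np.floating):
#             return float(obj)
#         if isinstance(obj, np.ndarray):
#             return obj.tolist()
#         return super().default(obj)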