renamed files
This commit is contained in:
parent 600f829972
commit 0fd3e75ab0
4 changed files with 6 additions and 6 deletions

105  dynamut/get_results.py  Executable file

@@ -0,0 +1,105 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 19 14:33:51 2020

@author: tanu
"""


#%% load packages
import os,sys
import subprocess
import argparse
import requests
import re
import time
from bs4 import BeautifulSoup
import pandas as pd
from pandas.api.types import is_string_dtype
from pandas.api.types import is_numeric_dtype
#%%============================================================================
#streptomycin/gid_complex.pdb
host_dynamut = 'http://biosig.unimelb.edu.au/dynamut'
pred_dynamut_batch = '/results_prediction/161287964015'
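# the numeric job id is the trailing digits of the batch results path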
result_id = re.search(r"([0-9]+)$", pred_dynamut_batch).group(0)

batch_result_url = host_dynamut + pred_dynamut_batch

mut = 'S2C'
single_url = host_dynamut + '/single_results/' + str(result_id)
single_result_url = host_dynamut + '/single_results/' + str(result_id) + '/' + mut
print(single_result_url)

#%%============================================================================
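# single-mutation scrape: fetch the results page for one mutation and read each
# prediction value from the HTML element whose id names the predictor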
param_dict = {}

result_response = requests.get(single_result_url)
if result_response.status_code == 200:
    print('Fetching results')
    # extract results using the html parser
    soup = BeautifulSoup(result_response.text, features = 'html.parser')
    #web_result_raw = soup.find(id = 'predictions').get_text()
    ddg_dynamut = soup.find(id = 'ddg_dynamut').get_text()
    ddg_encom = soup.find(id = 'ddg_encom').get_text()
    ddg_mcsm = soup.find(id = 'ddg_mcsm').get_text()
    ddg_sdm = soup.find(id = 'ddg_sdm').get_text()
    ddg_duet = soup.find(id = 'ddg_duet').get_text()
    dds_encom = soup.find(id = 'dds_encom').get_text()

    param_dict = {"mutationinformation" : mut
                  , "ddg_dynamut" : ddg_dynamut
                  , "ddg_encom" : ddg_encom
                  , "ddg_mcsm" : ddg_mcsm
                  , "ddg_sdm" : ddg_sdm
                  , "ddg_duet" : ddg_duet
                  , "dds_encom" : dds_encom

                  }
    results_df = pd.DataFrame.from_dict(param_dict, orient = "index").T

    print(results_df)

#%% for loop

single_url = host_dynamut + '/single_results/' + str(result_id)

muts = ["S2C", "S2F"]

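# batch scrape: repeat the same fetch-and-parse for every mutation in muts,
# stacking each one-row dataframe into dynamut_results_df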
# initialise empty df
dynamut_results_df = pd.DataFrame()

for i, mut in enumerate(muts):
    #param_dict = {}
    print('Running mutation', i+1, ':', mut)
    snp = mut
    single_result_url = single_url + '/' + snp
    print('Getting results from:', single_result_url)

    result_response = requests.get(single_result_url)
    if result_response.status_code == 200:
        print('Fetching results')
        # extract results using the html parser
        soup = BeautifulSoup(result_response.text, features = 'html.parser')
        #web_result_raw = soup.find(id = 'predictions').get_text()
        ddg_dynamut = soup.find(id = 'ddg_dynamut').get_text()
        ddg_encom = soup.find(id = 'ddg_encom').get_text()
        ddg_mcsm = soup.find(id = 'ddg_mcsm').get_text()
        ddg_sdm = soup.find(id = 'ddg_sdm').get_text()
        ddg_duet = soup.find(id = 'ddg_duet').get_text()
        dds_encom = soup.find(id = 'dds_encom').get_text()

        param_dict = {"mutationinformation" : snp
                      , "ddg_dynamut" : ddg_dynamut
                      , "ddg_encom" : ddg_encom
                      , "ddg_mcsm" : ddg_mcsm
                      , "ddg_sdm" : ddg_sdm
                      , "ddg_duet" : ddg_duet
                      , "dds_encom" : dds_encom
                      }
        results_df = pd.DataFrame.from_dict(param_dict, orient = "index").T
        print(results_df)
        # DataFrame.append() was removed in pandas 2.x; pd.concat keeps this runnable
        dynamut_results_df = pd.concat([dynamut_results_df, results_df])
        print(dynamut_results_df)
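#%%============================================================================
# possible follow-up (a sketch, not part of the committed script): write the
# collected predictions out; the filename below is only an illustrative placeholder
#dynamut_results_df.to_csv('dynamut_results.csv', index = False)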