thbndi committed on
Commit 93f39a5
1 Parent(s): daf5242

Upload 6 files

Files changed (6)
  1. cohort.py +136 -0
  2. data_generation_icu_modify.py +581 -0
  3. los.config +23 -0
  4. mortality.config +23 -0
  5. phenotype.config +23 -0
  6. readmission.config +23 -0
cohort.py ADDED
@@ -0,0 +1,136 @@
+ import ipywidgets as widgets
+ import sys
+ from pathlib import Path
+ import os
+ import importlib
+ sys.path.append('preprocessing/day_intervals_preproc')
+ sys.path.append('utils')
+ sys.path.append('preprocessing/hosp_module_preproc')
+ sys.path.append('model')
+ #print(sys.path)
+ root_dir = os.path.dirname(os.path.abspath('UserInterface.ipynb'))
+ import day_intervals_cohort_v2
+ import day_intervals_cohort
+ import feature_selection_icu
+ import data_generation_icu_modify
+ import time
+ import yaml
+
+ # Runs cohort extraction, feature preprocessing, cleaning and time-series generation
+ # for the selected prediction task, driven by the matching .config file under ./config/.
+ def task_cohort(task,mimic_path):
+     version_path = mimic_path
+     version = version_path.split('/')[-1][0]
+     start = time.time()
+     #----------------------------------------------config----------------------------------------------------
+     if task=='Mortality':
+         with open('./config/mortality.config') as f:
+             config = yaml.safe_load(f)
+
+     elif task == 'Phenotype':
+         with open('./config/phenotype.config') as f:
+             config = yaml.safe_load(f)
+
+     elif task == 'Length of Stay':
+         with open('./config/los.config') as f:
+             config = yaml.safe_load(f)
+
+     elif task == 'Readmission':
+         with open('./config/readmission.config') as f:
+             config = yaml.safe_load(f)
+
+     disease_label = config['disease_label']
+     tim = config['time']
+     label = config['label']
+     timeW = config['timeW']
+     include=int(timeW.split()[1])
+     bucket = config['bucket']
+     radimp = config['radimp']
+     predW = config['predW']
+     diag_flag = config['diagnosis']
+     out_flag = config['output']
+     chart_flag = config['chart']
+     proc_flag= config['proc']
+     med_flag = config['meds']
+     disease_filter = config['disease_filter']
+     icu_no_icu = config['icu_no_icu']
+     groupingICD = config['groupingICD']
+     # -------------------------------------------------------------------------------------------------------------
+
+     data_icu=icu_no_icu=="ICU"
+     data_mort=label=="Mortality"
+     data_admn=label=='Readmission'
+     data_los=label=='Length of Stay'
+
+     if (disease_filter=="Heart Failure"):
+         icd_code='I50'
+     elif (disease_filter=="CKD"):
+         icd_code='N18'
+     elif (disease_filter=="COPD"):
+         icd_code='J44'
+     elif (disease_filter=="CAD"):
+         icd_code='I25'
+     else:
+         icd_code='No Disease Filter'
+
+     #-----------------------------------------------EXTRACT MIMIC-----------------------------------------------------
+     if version == '2':
+         cohort_output = day_intervals_cohort_v2.extract_data(icu_no_icu,label,tim,icd_code, root_dir,disease_label)
+     elif version == '1':
+         cohort_output = day_intervals_cohort.extract_data(icu_no_icu,label,tim,icd_code, root_dir,disease_label)
+     end = time.time()
+     print("Time elapsed : ", round((end - start)/60,2),"mins")
+     #----------------------------------------------FEATURES-------------------------------------------------------
+     if data_icu:
+         feature_selection_icu.feature_icu(cohort_output, version_path,diag_flag,out_flag,chart_flag,proc_flag,med_flag)
+     end = time.time()
+     print("Time elapsed : ", round((end - start)/60,2),"mins")
+     #----------------------------------------------GROUPING-------------------------------------------------------
+     group_diag=False
+     group_med=False
+     group_proc=False
+     if data_icu:
+         if diag_flag:
+             group_diag=groupingICD
+         feature_selection_icu.preprocess_features_icu(cohort_output, diag_flag, group_diag,False,False,False,0,0)
+     end = time.time()
+     print("Time elapsed : ", round((end - start)/60,2),"mins")
+     #----------------------------------------------SUMMARY-------------------------------------------------------
+     if data_icu:
+         feature_selection_icu.generate_summary_icu(cohort_output,diag_flag,proc_flag,med_flag,out_flag,chart_flag)
+     end = time.time()
+     print("Time elapsed : ", round((end - start)/60,2),"mins")
+     #----------------------------------------------FEATURE SELECTION---------------------------------------------
+
+     select_diag= config['select_diag']
+     select_med= config['select_med']
+     select_proc= config['select_proc']
+     #select_lab= config['select_lab']
+     select_out= config['select_out']
+     select_chart= config['select_chart']
+
+     feature_selection_icu.features_selection_icu(cohort_output, diag_flag,proc_flag,med_flag,out_flag, chart_flag,select_diag,select_med,select_proc,select_out,select_chart)
+     end = time.time()
+     print("Time elapsed : ", round((end - start)/60,2),"mins")
+     #---------------------------------------CLEANING OF FEATURES-----------------------------------------------
+     thresh=0
+     if data_icu:
+         if chart_flag:
+             outlier_removal=config['outlier_removal']
+             clean_chart=outlier_removal!='No outlier detection'
+             impute_outlier_chart=outlier_removal=='Impute Outlier (default:98)'
+             thresh=config['outlier']
+             left_thresh=config['left_outlier']
+             feature_selection_icu.preprocess_features_icu(cohort_output, False, False,chart_flag,clean_chart,impute_outlier_chart,thresh,left_thresh)
+     end = time.time()
+     print("Time elapsed : ", round((end - start)/60,2),"mins")
+     # ---------------------------------------Time-Series Representation--------------------------------------------
+     if radimp == 'forward fill and mean':
+         impute='Mean'
+     elif radimp =='forward fill and median':
+         impute = 'Median'
+     else:
+         impute = False
+
+     if data_icu:
+         gen=data_generation_icu_modify.Generator(task,cohort_output,data_mort,data_admn,data_los,diag_flag,proc_flag,out_flag,chart_flag,med_flag,impute,include,bucket,predW)
+     print("[============TASK COHORT SUCCESSFULLY CREATED============]")
data_generation_icu_modify.py ADDED
@@ -0,0 +1,581 @@
+ import numpy as np
+ import pandas as pd
+ from tqdm import tqdm
+ from datetime import datetime
+ from sklearn.preprocessing import LabelEncoder
+ import pickle
+ import datetime
+ import os
+ import sys
+ from pathlib import Path
+ sys.path.append(os.path.dirname(os.path.abspath(__file__)) + './../..')
+ if not os.path.exists("./data/dict"):
+     os.makedirs("./data/dict")
+ if not os.path.exists("./data/csv"):
+     os.makedirs("./data/csv")
+
+ # Builds per-stay time-series dictionaries (meds, procedures, output events, chart
+ # events, diagnoses) for the selected task and pickles them under ./data/dict/.
+ class Generator():
+     def __init__(self,task,cohort_output,if_mort,if_admn,if_los,feat_cond,feat_proc,feat_out,feat_chart,feat_med,impute,include_time=24,bucket=1,predW=6):
+         self.feat_cond,self.feat_proc,self.feat_out,self.feat_chart,self.feat_med = feat_cond,feat_proc,feat_out,feat_chart,feat_med
+         self.cohort_output=cohort_output
+         self.impute=impute
+         self.task = task
+         self.data = self.generate_adm()
+         if not os.path.exists("./data/dict/"+self.cohort_output):
+             os.makedirs("./data/dict/"+self.cohort_output)
+         print("[ READ COHORT ]")
+
+         self.generate_feat()
+         print("[ READ ALL FEATURES ]")
+
+         if if_mort:
+             self.mortality_length(include_time,predW)
+             print("[ PROCESSED TIME SERIES TO EQUAL LENGTH ]")
+         elif if_admn:
+             self.readmission_length(include_time)
+             print("[ PROCESSED TIME SERIES TO EQUAL LENGTH ]")
+         elif if_los:
+             self.los_length(include_time)
+             print("[ PROCESSED TIME SERIES TO EQUAL LENGTH ]")
+
+         self.smooth_meds(bucket)
+         print("[ SUCCESSFULLY SAVED DATA DICTIONARIES ]")
+
+     def generate_feat(self):
+         if(self.feat_cond):
+             print("[ ======READING DIAGNOSIS ]")
+             self.generate_cond()
+         if(self.feat_proc):
+             print("[ ======READING PROCEDURES ]")
+             self.generate_proc()
+         if(self.feat_out):
+             print("[ ======READING OUT EVENTS ]")
+             self.generate_out()
+         if(self.feat_chart):
+             print("[ ======READING CHART EVENTS ]")
+             self.generate_chart()
+         if(self.feat_med):
+             print("[ ======READING MEDICATIONS ]")
+             self.generate_meds()
+
+     def generate_adm(self):
+         data=pd.read_csv(f"./data/cohort/{self.cohort_output}.csv.gz", compression='gzip', header=0, index_col=None)
+         data['intime'] = pd.to_datetime(data['intime'])
+         data['outtime'] = pd.to_datetime(data['outtime'])
+         data['los']=pd.to_timedelta(data['outtime']-data['intime'],unit='h')
+         data['los']=data['los'].astype(str)
+         data[['days', 'dummy','hours']] = data['los'].str.split(' ', expand=True)
+         data[['hours','min','sec']] = data['hours'].str.split(':', expand=True)
+         data['los']=pd.to_numeric(data['days'])*24+pd.to_numeric(data['hours'])
+         data=data.drop(columns=['days', 'dummy','hours','min','sec'])
+         data=data[data['los']>0]
+         data['Age']=data['Age'].astype(int)
+         #print(data.head())
+         #print(data.shape)
+         return data
+
+     def generate_cond(self):
+         cond=pd.read_csv("./data/features/preproc_diag_icu.csv.gz", compression='gzip', header=0, index_col=None)
+         cond=cond[cond['stay_id'].isin(self.data['stay_id'])]
+         cond_per_adm = cond.groupby('stay_id').size().max()
+         self.cond, self.cond_per_adm = cond, cond_per_adm
+
+     def generate_proc(self):
+         proc=pd.read_csv("./data/features/preproc_proc_icu.csv.gz", compression='gzip', header=0, index_col=None)
+         proc=proc[proc['stay_id'].isin(self.data['stay_id'])]
+         proc[['start_days', 'dummy','start_hours']] = proc['event_time_from_admit'].str.split(' ', -1, expand=True)
+         proc[['start_hours','min','sec']] = proc['start_hours'].str.split(':', -1, expand=True)
+         proc['start_time']=pd.to_numeric(proc['start_days'])*24+pd.to_numeric(proc['start_hours'])
+         proc=proc.drop(columns=['start_days', 'dummy','start_hours','min','sec'])
+         proc=proc[proc['start_time']>=0]
+
+         ###Remove where event time is after discharge time
+         proc=pd.merge(proc,self.data[['stay_id','los']],on='stay_id',how='left')
+         proc['sanity']=proc['los']-proc['start_time']
+         proc=proc[proc['sanity']>0]
+         del proc['sanity']
+
+         self.proc=proc
+
+     def generate_out(self):
+         out=pd.read_csv("./data/features/preproc_out_icu.csv.gz", compression='gzip', header=0, index_col=None)
+         out=out[out['stay_id'].isin(self.data['stay_id'])]
+         out[['start_days', 'dummy','start_hours']] = out['event_time_from_admit'].str.split(' ', -1, expand=True)
+         out[['start_hours','min','sec']] = out['start_hours'].str.split(':', -1, expand=True)
+         out['start_time']=pd.to_numeric(out['start_days'])*24+pd.to_numeric(out['start_hours'])
+         out=out.drop(columns=['start_days', 'dummy','start_hours','min','sec'])
+         out=out[out['start_time']>=0]
+
+         ###Remove where event time is after discharge time
+         out=pd.merge(out,self.data[['stay_id','los']],on='stay_id',how='left')
+         out['sanity']=out['los']-out['start_time']
+         out=out[out['sanity']>0]
+         del out['sanity']
+
+         self.out=out
+
+     def generate_chart(self):
+         chunksize = 5000000
+         final=pd.DataFrame()
+         for chart in tqdm(pd.read_csv("./data/features/preproc_chart_icu.csv.gz", compression='gzip', header=0, index_col=None,chunksize=chunksize)):
+             chart=chart[chart['stay_id'].isin(self.data['stay_id'])]
+             chart[['start_days', 'dummy','start_hours']] = chart['event_time_from_admit'].str.split(' ', -1, expand=True)
+             chart[['start_hours','min','sec']] = chart['start_hours'].str.split(':', -1, expand=True)
+             chart['start_time']=pd.to_numeric(chart['start_days'])*24+pd.to_numeric(chart['start_hours'])
+             chart=chart.drop(columns=['start_days', 'dummy','start_hours','min','sec','event_time_from_admit'])
+             chart=chart[chart['start_time']>=0]
+
+             ###Remove where event time is after discharge time
+             chart=pd.merge(chart,self.data[['stay_id','los']],on='stay_id',how='left')
+             chart['sanity']=chart['los']-chart['start_time']
+             chart=chart[chart['sanity']>0]
+             del chart['sanity']
+             del chart['los']
+
+             if final.empty:
+                 final=chart
+             else:
+                 final=final.append(chart, ignore_index=True)
+
+         self.chart=final
+
+     def generate_meds(self):
+         meds=pd.read_csv("./data/features/preproc_med_icu.csv.gz", compression='gzip', header=0, index_col=None)
+         meds[['start_days', 'dummy','start_hours']] = meds['start_hours_from_admit'].str.split(' ', -1, expand=True)
+         meds[['start_hours','min','sec']] = meds['start_hours'].str.split(':', -1, expand=True)
+         meds['start_time']=pd.to_numeric(meds['start_days'])*24+pd.to_numeric(meds['start_hours'])
+         meds[['start_days', 'dummy','start_hours']] = meds['stop_hours_from_admit'].str.split(' ', -1, expand=True)
+         meds[['start_hours','min','sec']] = meds['start_hours'].str.split(':', -1, expand=True)
+         meds['stop_time']=pd.to_numeric(meds['start_days'])*24+pd.to_numeric(meds['start_hours'])
+         meds=meds.drop(columns=['start_days', 'dummy','start_hours','min','sec'])
+         #####Sanity check
+         meds['sanity']=meds['stop_time']-meds['start_time']
+         meds=meds[meds['sanity']>0]
+         del meds['sanity']
+         #####Select hadm_id as in main file
+         meds=meds[meds['stay_id'].isin(self.data['stay_id'])]
+         meds=pd.merge(meds,self.data[['stay_id','los']],on='stay_id',how='left')
+
+         #####Remove where start time is after end of visit
+         meds['sanity']=meds['los']-meds['start_time']
+         meds=meds[meds['sanity']>0]
+         del meds['sanity']
+         ####Any stop_time after end of visit is set at end of visit
+         meds.loc[meds['stop_time'] > meds['los'],'stop_time']=meds.loc[meds['stop_time'] > meds['los'],'los']
+         del meds['los']
+
+         meds['rate']=meds['rate'].apply(pd.to_numeric, errors='coerce')
+         meds['amount']=meds['amount'].apply(pd.to_numeric, errors='coerce')
+
+         self.meds=meds
+
+     def mortality_length(self,include_time,predW):
+         print("include_time",include_time)
+         self.los=include_time
+         self.data=self.data[(self.data['los']>=include_time+predW)]
+         self.hids=self.data['stay_id'].unique()
+
+         if(self.feat_cond):
+             self.cond=self.cond[self.cond['stay_id'].isin(self.data['stay_id'])]
+
+         self.data['los']=include_time
+
+         ####Make equal length input time series and remove data for pred window if needed
+
+         ###MEDS
+         if(self.feat_med):
+             self.meds=self.meds[self.meds['stay_id'].isin(self.data['stay_id'])]
+             self.meds=self.meds[self.meds['start_time']<=include_time]
+             self.meds.loc[self.meds.stop_time >include_time, 'stop_time']=include_time
+
+         ###PROCS
+         if(self.feat_proc):
+             self.proc=self.proc[self.proc['stay_id'].isin(self.data['stay_id'])]
+             self.proc=self.proc[self.proc['start_time']<=include_time]
+
+         ###OUT
+         if(self.feat_out):
+             self.out=self.out[self.out['stay_id'].isin(self.data['stay_id'])]
+             self.out=self.out[self.out['start_time']<=include_time]
+
+         ###CHART
+         if(self.feat_chart):
+             self.chart=self.chart[self.chart['stay_id'].isin(self.data['stay_id'])]
+             self.chart=self.chart[self.chart['start_time']<=include_time]
+
+         #self.los=include_time
+
+     def los_length(self,include_time):
+         print("include_time",include_time)
+         self.los=include_time
+         self.data=self.data[(self.data['los']>=include_time)]
+         self.hids=self.data['stay_id'].unique()
+
+         if(self.feat_cond):
+             self.cond=self.cond[self.cond['stay_id'].isin(self.data['stay_id'])]
+
+         self.data['los']=include_time
+
+         ####Make equal length input time series and remove data for pred window if needed
+
+         ###MEDS
+         if(self.feat_med):
+             self.meds=self.meds[self.meds['stay_id'].isin(self.data['stay_id'])]
+             self.meds=self.meds[self.meds['start_time']<=include_time]
+             self.meds.loc[self.meds.stop_time >include_time, 'stop_time']=include_time
+
+         ###PROCS
+         if(self.feat_proc):
+             self.proc=self.proc[self.proc['stay_id'].isin(self.data['stay_id'])]
+             self.proc=self.proc[self.proc['start_time']<=include_time]
+
+         ###OUT
+         if(self.feat_out):
+             self.out=self.out[self.out['stay_id'].isin(self.data['stay_id'])]
+             self.out=self.out[self.out['start_time']<=include_time]
+
+         ###CHART
+         if(self.feat_chart):
+             self.chart=self.chart[self.chart['stay_id'].isin(self.data['stay_id'])]
+             self.chart=self.chart[self.chart['start_time']<=include_time]
+
+     def readmission_length(self,include_time):
+         self.los=include_time
+         self.data=self.data[(self.data['los']>=include_time)]
+         self.hids=self.data['stay_id'].unique()
+
+         if(self.feat_cond):
+             self.cond=self.cond[self.cond['stay_id'].isin(self.data['stay_id'])]
+         self.data['select_time']=self.data['los']-include_time
+         self.data['los']=include_time
+
+         ####Make equal length input time series and remove data for pred window if needed
+
+         ###MEDS
+         if(self.feat_med):
+             self.meds=self.meds[self.meds['stay_id'].isin(self.data['stay_id'])]
+             self.meds=pd.merge(self.meds,self.data[['stay_id','select_time']],on='stay_id',how='left')
+             self.meds['stop_time']=self.meds['stop_time']-self.meds['select_time']
+             self.meds['start_time']=self.meds['start_time']-self.meds['select_time']
+             self.meds=self.meds[self.meds['stop_time']>=0]
+             self.meds.loc[self.meds.start_time <0, 'start_time']=0
+
+         ###PROCS
+         if(self.feat_proc):
+             self.proc=self.proc[self.proc['stay_id'].isin(self.data['stay_id'])]
+             self.proc=pd.merge(self.proc,self.data[['stay_id','select_time']],on='stay_id',how='left')
+             self.proc['start_time']=self.proc['start_time']-self.proc['select_time']
+             self.proc=self.proc[self.proc['start_time']>=0]
+
+         ###OUT
+         if(self.feat_out):
+             self.out=self.out[self.out['stay_id'].isin(self.data['stay_id'])]
+             self.out=pd.merge(self.out,self.data[['stay_id','select_time']],on='stay_id',how='left')
+             self.out['start_time']=self.out['start_time']-self.out['select_time']
+             self.out=self.out[self.out['start_time']>=0]
+
+         ###CHART
+         if(self.feat_chart):
+             self.chart=self.chart[self.chart['stay_id'].isin(self.data['stay_id'])]
+             self.chart=pd.merge(self.chart,self.data[['stay_id','select_time']],on='stay_id',how='left')
+             self.chart['start_time']=self.chart['start_time']-self.chart['select_time']
+             self.chart=self.chart[self.chart['start_time']>=0]
+
+     def smooth_meds(self,bucket):
+         final_meds=pd.DataFrame()
+         final_proc=pd.DataFrame()
+         final_out=pd.DataFrame()
+         final_chart=pd.DataFrame()
+
+         if(self.feat_med):
+             self.meds=self.meds.sort_values(by=['start_time'])
+         if(self.feat_proc):
+             self.proc=self.proc.sort_values(by=['start_time'])
+         if(self.feat_out):
+             self.out=self.out.sort_values(by=['start_time'])
+         if(self.feat_chart):
+             self.chart=self.chart.sort_values(by=['start_time'])
+
+         t=0
+         for i in tqdm(range(0,self.los,bucket)):
+             ###MEDS
+             if(self.feat_med):
+                 sub_meds=self.meds[(self.meds['start_time']>=i) & (self.meds['start_time']<i+bucket)].groupby(['stay_id','itemid','orderid']).agg({'stop_time':'max','subject_id':'max','rate':np.nanmean,'amount':np.nanmean})
+                 sub_meds=sub_meds.reset_index()
+                 sub_meds['start_time']=t
+                 sub_meds['stop_time']=sub_meds['stop_time']/bucket
+                 if final_meds.empty:
+                     final_meds=sub_meds
+                 else:
+                     final_meds=final_meds.append(sub_meds)
+
+             ###PROC
+             if(self.feat_proc):
+                 sub_proc=self.proc[(self.proc['start_time']>=i) & (self.proc['start_time']<i+bucket)].groupby(['stay_id','itemid']).agg({'subject_id':'max'})
+                 sub_proc=sub_proc.reset_index()
+                 sub_proc['start_time']=t
+                 if final_proc.empty:
+                     final_proc=sub_proc
+                 else:
+                     final_proc=final_proc.append(sub_proc)
+
+             ###OUT
+             if(self.feat_out):
+                 sub_out=self.out[(self.out['start_time']>=i) & (self.out['start_time']<i+bucket)].groupby(['stay_id','itemid']).agg({'subject_id':'max'})
+                 sub_out=sub_out.reset_index()
+                 sub_out['start_time']=t
+                 if final_out.empty:
+                     final_out=sub_out
+                 else:
+                     final_out=final_out.append(sub_out)
+
+             ###CHART
+             if(self.feat_chart):
+                 sub_chart=self.chart[(self.chart['start_time']>=i) & (self.chart['start_time']<i+bucket)].groupby(['stay_id','itemid']).agg({'valuenum':np.nanmean})
+                 sub_chart=sub_chart.reset_index()
+                 sub_chart['start_time']=t
+                 if final_chart.empty:
+                     final_chart=sub_chart
+                 else:
+                     final_chart=final_chart.append(sub_chart)
+
+             t=t+1
+         print("bucket",bucket)
+         los=int(self.los/bucket)
+
+         ###MEDS
+         if(self.feat_med):
+             f2_meds=final_meds.groupby(['stay_id','itemid','orderid']).size()
+             self.med_per_adm=f2_meds.groupby('stay_id').sum().reset_index()[0].max()
+             self.medlength_per_adm=final_meds.groupby('stay_id').size().max()
+
+         ###PROC
+         if(self.feat_proc):
+             f2_proc=final_proc.groupby(['stay_id','itemid']).size()
+             self.proc_per_adm=f2_proc.groupby('stay_id').sum().reset_index()[0].max()
+             self.proclength_per_adm=final_proc.groupby('stay_id').size().max()
+
+         ###OUT
+         if(self.feat_out):
+             f2_out=final_out.groupby(['stay_id','itemid']).size()
+             self.out_per_adm=f2_out.groupby('stay_id').sum().reset_index()[0].max()
+             self.outlength_per_adm=final_out.groupby('stay_id').size().max()
+
+         ###CHART
+         if(self.feat_chart):
+             f2_chart=final_chart.groupby(['stay_id','itemid']).size()
+             self.chart_per_adm=f2_chart.groupby('stay_id').sum().reset_index()[0].max()
+             self.chartlength_per_adm=final_chart.groupby('stay_id').size().max()
+
+         print("[ PROCESSED TIME SERIES TO EQUAL TIME INTERVAL ]")
+         ###CREATE DICT
+         # if(self.feat_chart):
+         #     self.create_chartDict(final_chart,los)
+         # else:
+         self.create_Dict(final_meds,final_proc,final_out,final_chart,los)
+
+     def create_Dict(self,meds,proc,out,chart,los):
+         dataDic={}
+
+         for hid in self.hids:
+             grp=self.data[self.data['stay_id']==hid]
+             dataDic[hid]={'Cond':{},'Proc':{},'Med':{},'Out':{},'Chart':{},'ethnicity':grp['ethnicity'].iloc[0],'age':int(grp['Age']),'gender':grp['gender'].iloc[0],'label':int(grp['label'])}
+
+         for hid in tqdm(self.hids):
+             grp=self.data[self.data['stay_id']==hid]
+
+             ###MEDS
+             if(self.feat_med):
+                 feat=meds['itemid'].unique()
+                 df2=meds[meds['stay_id']==hid]
+                 if df2.shape[0]==0:
+                     amount=pd.DataFrame(np.zeros([los,len(feat)]),columns=feat)
+                     amount=amount.fillna(0)
+                     amount.columns=pd.MultiIndex.from_product([["MEDS"], amount.columns])
+                 else:
+                     rate=df2.pivot_table(index='start_time',columns='itemid',values='rate')
+                     #print(rate)
+                     amount=df2.pivot_table(index='start_time',columns='itemid',values='amount')
+                     df2=df2.pivot_table(index='start_time',columns='itemid',values='stop_time')
+                     #print(df2.shape)
+                     add_indices = pd.Index(range(los)).difference(df2.index)
+                     add_df = pd.DataFrame(index=add_indices, columns=df2.columns).fillna(np.nan)
+                     df2=pd.concat([df2, add_df])
+                     df2=df2.sort_index()
+                     df2=df2.ffill()
+                     df2=df2.fillna(0)
+
+                     rate=pd.concat([rate, add_df])
+                     rate=rate.sort_index()
+                     rate=rate.ffill()
+                     rate=rate.fillna(-1)
+
+                     amount=pd.concat([amount, add_df])
+                     amount=amount.sort_index()
+                     amount=amount.ffill()
+                     amount=amount.fillna(-1)
+                     #print(df2.head())
+                     df2.iloc[:,0:]=df2.iloc[:,0:].sub(df2.index,0)
+                     df2[df2>0]=1
+                     df2[df2<0]=0
+                     rate.iloc[:,0:]=df2.iloc[:,0:]*rate.iloc[:,0:]
+                     amount.iloc[:,0:]=df2.iloc[:,0:]*amount.iloc[:,0:]
+                     #print(df2.head())
+                     dataDic[hid]['Med']['signal']=df2.iloc[:,0:].to_dict(orient="list")
+                     dataDic[hid]['Med']['rate']=rate.iloc[:,0:].to_dict(orient="list")
+                     dataDic[hid]['Med']['amount']=amount.iloc[:,0:].to_dict(orient="list")
+
+             ###PROCS
+             if(self.feat_proc):
+                 feat=proc['itemid'].unique()
+                 df2=proc[proc['stay_id']==hid]
+                 if df2.shape[0]==0:
+                     df2=pd.DataFrame(np.zeros([los,len(feat)]),columns=feat)
+                     df2=df2.fillna(0)
+                     df2.columns=pd.MultiIndex.from_product([["PROC"], df2.columns])
+                 else:
+                     df2['val']=1
+                     #print(df2)
+                     df2=df2.pivot_table(index='start_time',columns='itemid',values='val')
+                     #print(df2.shape)
+                     add_indices = pd.Index(range(los)).difference(df2.index)
+                     add_df = pd.DataFrame(index=add_indices, columns=df2.columns).fillna(np.nan)
+                     df2=pd.concat([df2, add_df])
+                     df2=df2.sort_index()
+                     df2=df2.fillna(0)
+                     df2[df2>0]=1
+                     #print(df2.head())
+                     dataDic[hid]['Proc']=df2.to_dict(orient="list")
+
+             ###OUT
+             if(self.feat_out):
+                 feat=out['itemid'].unique()
+                 df2=out[out['stay_id']==hid]
+                 if df2.shape[0]==0:
+                     df2=pd.DataFrame(np.zeros([los,len(feat)]),columns=feat)
+                     df2=df2.fillna(0)
+                     df2.columns=pd.MultiIndex.from_product([["OUT"], df2.columns])
+                 else:
+                     df2['val']=1
+                     df2=df2.pivot_table(index='start_time',columns='itemid',values='val')
+                     #print(df2.shape)
+                     add_indices = pd.Index(range(los)).difference(df2.index)
+                     add_df = pd.DataFrame(index=add_indices, columns=df2.columns).fillna(np.nan)
+                     df2=pd.concat([df2, add_df])
+                     df2=df2.sort_index()
+                     df2=df2.fillna(0)
+                     df2[df2>0]=1
+                     #print(df2.head())
+                     dataDic[hid]['Out']=df2.to_dict(orient="list")
+
+             ###CHART
+             if(self.feat_chart):
+                 feat=chart['itemid'].unique()
+                 df2=chart[chart['stay_id']==hid]
+                 if df2.shape[0]==0:
+                     val=pd.DataFrame(np.zeros([los,len(feat)]),columns=feat)
+                     val=val.fillna(0)
+                     val.columns=pd.MultiIndex.from_product([["CHART"], val.columns])
+                 else:
+                     val=df2.pivot_table(index='start_time',columns='itemid',values='valuenum')
+                     df2['val']=1
+                     df2=df2.pivot_table(index='start_time',columns='itemid',values='val')
+                     #print(df2.shape)
+                     add_indices = pd.Index(range(los)).difference(df2.index)
+                     add_df = pd.DataFrame(index=add_indices, columns=df2.columns).fillna(np.nan)
+                     df2=pd.concat([df2, add_df])
+                     df2=df2.sort_index()
+                     df2=df2.fillna(0)
+
+                     val=pd.concat([val, add_df])
+                     val=val.sort_index()
+                     if self.impute=='Mean':
+                         val=val.ffill()
+                         val=val.bfill()
+                         val=val.fillna(val.mean())
+                     elif self.impute=='Median':
+                         val=val.ffill()
+                         val=val.bfill()
+                         val=val.fillna(val.median())
+                     val=val.fillna(0)
+
+                     df2[df2>0]=1
+                     df2[df2<0]=0
+                     #print(df2.head())
+                     dataDic[hid]['Chart']['signal']=df2.iloc[:,0:].to_dict(orient="list")
+                     dataDic[hid]['Chart']['val']=val.iloc[:,0:].to_dict(orient="list")
+
+             ##########COND#########
+             if(self.feat_cond):
+                 feat=self.cond['new_icd_code'].unique()
+                 grp=self.cond[self.cond['stay_id']==hid]
+                 if(grp.shape[0]==0):
+                     dataDic[hid]['Cond']={'fids':list(['<PAD>'])}
+                 else:
+                     dataDic[hid]['Cond']={'fids':list(grp['new_icd_code'])}
+
+         ######SAVE DICTIONARIES##############
+         metaDic={'Cond':{},'Proc':{},'Med':{},'Out':{},'Chart':{},'LOS':{}}
+         metaDic['LOS']=los
+         with open("./data/dict/dataDic", 'wb') as fp:
+             pickle.dump(dataDic, fp)
+
+         with open("./data/dict/hadmDic", 'wb') as fp:
+             pickle.dump(self.hids, fp)
+
+         with open("./data/dict/ethVocab", 'wb') as fp:
+             pickle.dump(list(self.data['ethnicity'].unique()), fp)
+             self.eth_vocab = self.data['ethnicity'].nunique()
+
+         with open("./data/dict/ageVocab", 'wb') as fp:
+             pickle.dump(list(self.data['Age'].unique()), fp)
+             self.age_vocab = self.data['Age'].nunique()
+
+         with open("./data/dict/insVocab", 'wb') as fp:
+             pickle.dump(list(self.data['insurance'].unique()), fp)
+             self.ins_vocab = self.data['insurance'].nunique()
+
+         if(self.feat_med):
+             with open("./data/dict/medVocab", 'wb') as fp:
+                 pickle.dump(list(meds['itemid'].unique()), fp)
+                 self.med_vocab = meds['itemid'].nunique()
+             metaDic['Med']=self.med_per_adm
+
+         if(self.feat_out):
+             with open("./data/dict/outVocab", 'wb') as fp:
+                 pickle.dump(list(out['itemid'].unique()), fp)
+                 self.out_vocab = out['itemid'].nunique()
+             metaDic['Out']=self.out_per_adm
+
+         if(self.feat_chart):
+             with open("./data/dict/chartVocab", 'wb') as fp:
+                 pickle.dump(list(chart['itemid'].unique()), fp)
+                 self.chart_vocab = chart['itemid'].nunique()
+             metaDic['Chart']=self.chart_per_adm
+
+         if(self.feat_cond):
+             with open("./data/dict/condVocab", 'wb') as fp:
+                 pickle.dump(list(self.cond['new_icd_code'].unique()), fp)
+                 self.cond_vocab = self.cond['new_icd_code'].nunique()
+             metaDic['Cond']=self.cond_per_adm
+
+         if(self.feat_proc):
+             with open("./data/dict/procVocab", 'wb') as fp:
+                 pickle.dump(list(proc['itemid'].unique()), fp)
+                 self.proc_vocab = proc['itemid'].nunique()
+             metaDic['Proc']=self.proc_per_adm
+
+         with open("./data/dict/metaDic", 'wb') as fp:
+             pickle.dump(metaDic, fp)
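For reference, a minimal sketch of reading back the dictionaries that create_Dict pickles under ./data/dict/ (file names and keys exactly as written by the code above; nothing else is assumed):

    import pickle

    # Per-stay feature dictionary keyed by stay_id, plus per-feature metadata.
    with open("./data/dict/dataDic", "rb") as fp:
        dataDic = pickle.load(fp)
    with open("./data/dict/metaDic", "rb") as fp:
        metaDic = pickle.load(fp)

    stay_id = next(iter(dataDic))
    print(metaDic['LOS'], dataDic[stay_id]['label'], dataDic[stay_id]['gender'])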
los.config ADDED
@@ -0,0 +1,23 @@
+ disease_label: null
+ time: 3
+ label: Length of Stay
+ timeW: First 24 hours
+ bucket: 2
+ radimp: forward fill and mean
+ predW: 0
+ diagnosis: True
+ output: True
+ chart: True
+ proc: True
+ meds: True
+ disease_filter: No Disease Filter
+ icu_no_icu: ICU
+ groupingICD: Convert ICD-9 to ICD-10 and group ICD-10 codes
+ select_diag: False
+ select_med: False
+ select_proc: False
+ select_out: False
+ select_chart: False
+ outlier_removal: Impute Outlier (default:98)
+ outlier: 98
+ left_outlier: 0
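Each .config file is plain YAML and is read with yaml.safe_load in cohort.py. A small sketch of inspecting the file above (the derived include value mirrors what task_cohort computes from timeW):

    import yaml

    with open('./config/los.config') as f:
        config = yaml.safe_load(f)

    # task_cohort keeps the second token of timeW as the observation window in hours.
    include = int(config['timeW'].split()[1])   # "First 24 hours" -> 24
    print(config['label'], include, config['bucket'], config['radimp'])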
mortality.config ADDED
@@ -0,0 +1,23 @@
+ disease_label: null
+ time: 0
+ label: Mortality
+ timeW: First 48 hours
+ bucket: 2
+ radimp: forward fill and mean
+ predW: 6
+ diagnosis: True
+ output: True
+ chart: True
+ proc: True
+ meds: True
+ disease_filter: No Disease Filter
+ icu_no_icu: ICU
+ groupingICD: Convert ICD-9 to ICD-10 and group ICD-10 codes
+ select_diag: False
+ select_med: False
+ select_proc: False
+ select_out: False
+ select_chart: False
+ outlier_removal: Impute Outlier (default:98)
+ outlier: 98
+ left_outlier: 0
phenotype.config ADDED
@@ -0,0 +1,23 @@
+ disease_label: I50
+ time: 30
+ label: Readmission
+ timeW: Last 72 hours
+ bucket: 2
+ radimp: forward fill and mean
+ predW: 0
+ diagnosis: True
+ output: True
+ chart: True
+ proc: True
+ meds: True
+ disease_filter: No Disease Filter
+ icu_no_icu: ICU
+ groupingICD: Convert ICD-9 to ICD-10 and group ICD-10 codes
+ select_diag: False
+ select_med: False
+ select_proc: False
+ select_out: False
+ select_chart: False
+ outlier_removal: Impute Outlier (default:98)
+ outlier: 98
+ left_outlier: 0
readmission.config ADDED
@@ -0,0 +1,23 @@
+ disease_label: null
+ time: 30
+ label: Readmission
+ timeW: Last 72 hours
+ bucket: 2
+ radimp: forward fill and mean
+ predW: 0
+ diagnosis: True
+ output: True
+ chart: True
+ proc: True
+ meds: True
+ disease_filter: No Disease Filter
+ icu_no_icu: ICU
+ groupingICD: Convert ICD-9 to ICD-10 and group ICD-10 codes
+ select_diag: False
+ select_med: False
+ select_proc: False
+ select_out: False
+ select_chart: False
+ outlier_removal: Impute Outlier (default:98)
+ outlier: 98
+ left_outlier: 0