thbndi committed on
Commit 897aa25
1 Parent(s): fe38969

Update Mimic4Dataset.py

Files changed (1)
  1. Mimic4Dataset.py +10 -1
Mimic4Dataset.py CHANGED
@@ -21,7 +21,7 @@ Available tasks are: Mortality, Length of Stay, Readmission, Phenotype.
  The data is extracted from the mimic4 database using this pipeline: 'https://github.com/healthylaife/MIMIC-IV-Data-Pipeline/tree/main'
  mimic path should have this form : "path/to/mimic4data/from/username/mimiciv/2.2"
  If you choose a Custom task provide a configuration file for the Time series.
- Currently working with Mimic-IV ICU data.
+ Currently working with Mimic-IV version 1 and 2
  """
  _BASE_URL = "https://huggingface.co/datasets/thbndi/Mimic4Dataset/resolve/main"
  _HOMEPAGE = "https://huggingface.co/datasets/thbndi/Mimic4Dataset"
@@ -430,8 +430,17 @@ class Mimic4Dataset(datasets.GeneratorBasedBuilder):
          dico = pickle.load(fp)
 
          df = pd.DataFrame.from_dict(dico, orient='index')
+         feat_tocsv=True
          for i, data in df.iterrows():
              dyn_df,cond_df,demo=concat_data(data,self.interval,self.feat_cond,self.feat_proc,self.feat_out, self.feat_chart, self.feat_meds,self.feat_lab,self.condDict, self.procDict, self.outDict, self.chartDict, self.medDict)
+             if feat_tocsv:
+                 # save the feature names of the vector for analysis purposes if needed
+                 feats = list(dyn_df.columns)
+                 feats.extend(list(cond_df.columns))
+                 feats.extend(list(demo.columns))
+                 df_feats = pd.DataFrame(columns=feats)
+                 df_feats.to_csv('./data/dict/'+self.config.name.replace(" ","_")+'/features_'+self.encoding+'.csv')
+                 feat_tocsv=False
              dyn=dyn_df.copy()
              dyn.columns=dyn.columns.droplevel(0)
              concat_cols = [f"{col}_{t}" for t in range(dyn.shape[0]) for col in dyn.columns]
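For context on the block added in this hunk, here is a minimal standalone sketch of what it does, with made-up column names standing in for the real dyn_df, cond_df and demo frames; it also shows why list.extend is used rather than list.append (append would insert the whole list as a single element and, since it returns None, would also overwrite feats).

import pandas as pd

# Stand-ins for dyn_df, cond_df and demo from the dataset script; the real dyn_df
# has a two-level column index, flattened here for brevity (column names are made up).
dyn_df = pd.DataFrame(columns=['hr_0', 'spo2_0'])
cond_df = pd.DataFrame(columns=['cond_I10', 'cond_E11'])
demo = pd.DataFrame(columns=['age', 'gender'])

# Collect every feature name once; extend keeps a flat list of strings.
feats = list(dyn_df.columns)
feats.extend(list(cond_df.columns))
feats.extend(list(demo.columns))

# A header-only DataFrame: the CSV contains just the feature names, written once per run.
df_feats = pd.DataFrame(columns=feats)
df_feats.to_csv('features_example.csv', index=False)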
 
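As a rough illustration of how the docstring touched in the first hunk expects the loader to be used, a hypothetical call is sketched below; the "Mortality" config name and the mimic_path keyword are assumptions for illustration, not the script's confirmed interface.

from datasets import load_dataset

# Hypothetical call: the task/config name and the mimic_path keyword pointing at a
# local MIMIC-IV extract are assumptions, not the confirmed arguments of this script.
dataset = load_dataset(
    "thbndi/Mimic4Dataset",
    "Mortality",
    mimic_path="path/to/mimic4data/from/username/mimiciv/2.2",
)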