package ddiextractor13;

import Types.DrugDDIFile;
import Types.Sentence;
import createArff.CreateArffFile;
import createArff.ListOfWords;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import readData.PreProcess;
import readData.Reading;

/**
 * Entry point for the DDI (drug-drug interaction) extraction pipeline.
 *
 * <p>Pipeline stages, in order:
 * <ol>
 *   <li>Read annotated corpus files from a directory ({@code Reading}).</li>
 *   <li>Pre-process the sentences in every file ({@code PreProcess}).</li>
 *   <li>Build the vocabulary / word list from the corpus ({@code ListOfWords}).</li>
 *   <li>Write a Weka ARFF file: attribute header, then data rows ({@code CreateArffFile}).</li>
 * </ol>
 */
public class DDIExtractor13 {

    /** Directory read when no command-line argument is given. */
    private static final String DEFAULT_DIR = "bin/Train/DrugBank/";

    /** Corpus label used when no command-line argument is given. */
    private static final String DEFAULT_CORPUS = "DrugBank";

    /**
     * Runs the extraction pipeline.
     *
     * @param args optional overrides: {@code args[0]} = corpus directory,
     *             {@code args[1]} = corpus name (e.g. "DrugBank" or "MedLine").
     *             Defaults preserve the original hard-coded behavior.
     */
    public static void main(String[] args) {
        // Allow the corpus location/name to be supplied on the command line,
        // falling back to the previous hard-coded DrugBank training set.
        String dir = args.length > 0 ? args[0] : DEFAULT_DIR;
        String corpus = args.length > 1 ? args[1] : DEFAULT_CORPUS;

        Reading readData = new Reading();
        List<DrugDDIFile> files = readData.readFromDir(dir, corpus);

        // Normalize/prepare sentences before feature extraction.
        PreProcess p = new PreProcess();
        p.preProcessSentences(files);

        // Build the vocabulary used as the ARFF attribute set.
        ListOfWords low = new ListOfWords();
        low.createListOfWords(files);

        // Emit the ARFF file: header (attributes) first, then instances.
        CreateArffFile caf = new CreateArffFile();
        caf.writeAttributes(low);
        caf.writeData(files, low);
    }
}
