package co.edu.unal.bioingenium.kbmed.knowledge.mapping.main;

import co.edu.unal.bioingenium.kbmed.data.PhysicalDocument;
import co.edu.unal.bioingenium.kbmed.data.loader.DocumentLoader;
import co.edu.unal.bioingenium.kbmed.data.loader.DocumentLoaderFactory;
import co.edu.unal.bioingenium.kbmed.data.loader.TXTLoader;
import co.edu.unal.bioingenium.kbmed.knowledge.mapping.impl.SoftMapping;
import co.edu.unal.bioingenium.kbmed.knowledge.mapping.vo.ConceptIdentified;
import co.edu.unal.bioingenium.kbmed.knowledge.ontology.OntologyMetaData;
import co.edu.unal.bioingenium.kbmed.knowledge.snomed.SnomedMetaData;
import co.edu.unal.bioingenium.kbmed.text.filter.FilterPipe;
import co.edu.unal.bioingenium.kbmed.text.representation.Corpus;
import co.edu.unal.bioingenium.kbmed.text.representation.Document;
import co.edu.unal.bioingenium.kbmed.util.io.FileManager;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Builds a word-occurrence {@link Corpus} from a document collection and maps document
 * content onto ontology concepts (SNOMED) via {@link SoftMapping}.
 *
 * @author Luis A Riveros
 */
public class IndexByConceptsMain {

    /**
     * Builds a {@link Corpus} by recursively listing every file under {@code corpusPath},
     * parsing each one with the loader chosen by {@link DocumentLoaderFactory}, filtering
     * its content through a {@link FilterPipe} and accumulating word statistics.
     *
     * @param corpusName  logical name assigned to the resulting corpus
     * @param corpusPath  root directory of the document collection
     * @param pruneBellow lower frequency bound (currently unused here — presumably reserved
     *                    for vocabulary pruning; TODO confirm intended use)
     * @param pruneAbove  upper frequency bound (currently unused here — see above)
     * @return the populated corpus, or {@code null} if the file list could not be read
     */
    public Corpus buildCorpus(String corpusName, String corpusPath, int pruneBellow, int pruneAbove) {
        corpusPath = FileManager.toStandardPath(corpusPath);
        FilterPipe filterPipe = new FilterPipe();
        //TODO better corpus generation: the building process should be incremental, driven by
        //changes or the appearance of new documents in the collection.
        Corpus corpus = new Corpus(corpusName);
        int count = 1;
        try {
            System.out.print("Getting file list...");
            List<String> fileList = FileManager.getRecursivelyFileList(corpusPath);
            System.out.print("Success\n");
            for (String fileName : fileList) {
                System.out.print((count++) + "/" + fileList.size() + " Parsing " + fileName + "...");
                try {
                    DocumentLoader documentLoader = DocumentLoaderFactory.getDocumentLoader(corpusPath + fileName);
                    PhysicalDocument physicalDocument = documentLoader.parseDocument(corpusPath + fileName);
                    if (physicalDocument != null) {
                        Document document = new Document(physicalDocument);
                        String content = physicalDocument.getContent();
                        List<String> words = filterPipe.filter(content);
                        for (String word : words) {
                            //Global occurrence count.
                            corpus.addWordOccurrence(word);
                            if (!document.containsWord(word)) {
                                //In-document occurrence: count each distinct word once per document.
                                corpus.addDocumentOccurence(word);
                            }
                            //Per-document word details.
                            document.addWordOccurrence(word);
                        }
                        //BUGFIX: register the document exactly once, after its word counts are
                        //complete. Previously this call sat inside the word loop, so the document
                        //was re-added once per word and never added when the word list was empty.
                        corpus.addDocument(document);
                        System.out.print("Success\n");
                    } else {
                        System.out.print("Failed\n");
                    }
                } catch (ClassNotFoundException ex) {
                    Logger.getLogger(DocumentLoaderFactory.class.getName()).log(Level.SEVERE,
                            "Loader for document " + fileName + " not found.", ex);
                }
            }
            return corpus;
        } catch (IOException ex) {
            Logger.getLogger(IndexByConceptsMain.class.getName()).log(Level.SEVERE,
                    "Could not read file list for corpus path " + corpusPath, ex);
        }
        return null;
    }

    /**
     * Maps every text document under the hard-coded corpus path onto SNOMED concepts.
     * Results are currently discarded after each iteration; output is observed through
     * the mapper's verbose mode.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        try {
            //Path is normalized the same way buildCorpus does before file names are appended.
            String corpusPath = FileManager.toStandardPath("D:\\Tesis\\Datos\\saruro_txt\\historias");
            DocumentLoader documentLoader = new TXTLoader();
            List<String> fileList = FileManager.getRecursivelyFileList(corpusPath);
            OntologyMetaData ontologyMetaData = new SnomedMetaData();
            SoftMapping mapping = new SoftMapping(ontologyMetaData);
            mapping.init();
            mapping.setVerbose(true);
            Map<String, List<ConceptIdentified>> mapResults;
            for (String file : fileList) {
                //BUGFIX: guard against a failed parse (buildCorpus already does). The original
                //chained call parseDocument(...).getContent() threw a NullPointerException and
                //aborted the whole run when a single document could not be parsed.
                PhysicalDocument physicalDocument = documentLoader.parseDocument(corpusPath + file);
                if (physicalDocument == null) {
                    Logger.getLogger(IndexByConceptsMain.class.getName()).log(Level.WARNING,
                            "Could not parse document {0}; skipping.", file);
                    continue;
                }
                mapResults = mapping.doMapping(physicalDocument.getContent());
            }
        } catch (IOException ex) {
            Logger.getLogger(IndexByConceptsMain.class.getName()).log(Level.SEVERE,
                    "Failed to list or read corpus documents.", ex);
        }
    }
}
