package edu.unc.ils.nasa;

import java.io.File;


import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.xml.namespace.QName;

import maui.stemmers.PorterStemmer;
import maui.stemmers.Stemmer;

import org.apache.commons.collections15.Bag;
import org.apache.commons.collections15.bag.HashBag;
import org.apache.commons.io.FileUtils;
import org.openrdf.concepts.skos.core.Concept;
import org.openrdf.elmo.ElmoModule;
import org.openrdf.elmo.sesame.SesameManager;
import org.openrdf.elmo.sesame.SesameManagerFactory;
import org.openrdf.repository.Repository;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.nativerdf.NativeStore;


/**
 * Builds a term-frequency index over the labels of a SKOS vocabulary stored in
 * a Sesame native RDF repository. For each non-stopword word appearing in a
 * concept's prefLabel/altLabels, it tracks how many times the word occurs and
 * the set of concept URIs it occurs in, then dumps the counts to "terms.txt".
 */
public class TermIndex 
{    
    // word -> number of occurrences across all concept labels
    Map<String, Integer> termCountMap = new HashMap<String, Integer>();
    // word -> bag of concept URIs whose labels contain the word
    Map<String, Bag<String>> termConceptMap = new HashMap<String, Bag<String>>();
    // words excluded from the index; loaded via loadStopwords()
    List<String> stopwords = new ArrayList<String>();
    // NOTE(review): stemmer is declared but never used in this class — confirm
    // whether stemming was intended before indexing words.
    Stemmer stemmer = new PorterStemmer();
    
    /**
     * Entry point. Paths may optionally be overridden on the command line:
     * args[0] = stopwords file, args[1] = Sesame native store directory.
     * Defaults preserve the original hard-coded locations.
     */
    public static void main(String[] args) throws Exception
    {
        String stopwordsPath = (args.length > 0)
            ? args[0] : "src/main/resources/stopwords_en.txt";
        String storePath = (args.length > 1)
            ? args[1] : "/Users/cwillis/dev/hive/hive-data/nalt/naltStoreEn";

        TermIndex kb = new TermIndex();
        kb.loadStopwords(stopwordsPath);
        kb.load(storePath);
        kb.dumpFreq();
    }

    /**
     * Writes one line per indexed word to "terms.txt" in the form
     * {@code word|occurrenceCount|distinctConceptCount} with CRLF line endings.
     *
     * @throws IOException if the file cannot be written
     */
    public void dumpFreq() throws IOException
    {
        FileWriter fw = new FileWriter("terms.txt");
        try
        {
            for (Map.Entry<String, Integer> entry : termCountMap.entrySet())
            {
                String key = entry.getKey();
                // uniqueSet() gives the number of distinct concepts containing
                // the word; the two maps are always populated together.
                fw.write(key + "|" + entry.getValue() + "|"
                    + termConceptMap.get(key).uniqueSet().size() + "\r\n");
            }
        }
        finally
        {
            // Close even if a write fails, so the handle is never leaked.
            fw.close();
        }
    }

    /**
     * Returns {@code segment} with all stopwords removed, words rejoined with
     * single spaces. Splitting is on single spaces only.
     *
     * @param segment whitespace-separated phrase
     * @return the phrase without stopwords (possibly empty)
     */
    public String removeStopwords(String segment)
    {
        // StringBuilder: no shared state, so the synchronized StringBuffer
        // is unnecessary overhead.
        StringBuilder stopped = new StringBuilder();
        
        String[] words = segment.split(" ");
        
        for (String word: words) {
            if (!stopwords.contains(word))
            {
                if (stopped.length() > 0)
                    stopped.append(" ");
                stopped.append(word);
            }
        }
        return stopped.toString();
    }
    

    
    // NASA uses a separate phrase-matching file. For now, we'll just use the vocabulary
    // as the source for the KB.
    /**
     * Loads all SKOS concepts from the Sesame native store at {@code path} and
     * indexes the words of each concept's prefLabel and altLabels.
     *
     * @param path directory of the Sesame native RDF store
     * @throws Exception on repository or I/O failure
     */
    public void load(String path) throws Exception
    {
        // Open the Sesame repository
        String indexes = "spoc,ospc";
        NativeStore store = new NativeStore(new File(path), indexes);
        Repository repository = new SailRepository(store);
        repository.initialize();
        try
        {
            ElmoModule module = new ElmoModule();
            SesameManagerFactory factory = new SesameManagerFactory(module, repository);
            SesameManager manager = factory.createElmoManager(); 
            
            for (Concept concept:  manager.findAll(Concept.class))
            {
                String prefLabel = concept.getSkosPrefLabel();
                Set<String> altLabels = concept.getSkosAltLabels();
                QName qname = concept.getQName();
                String uri = qname.getNamespaceURI() + qname.getLocalPart();
    
                addPhrase(prefLabel, uri);
                for (String altLabel: altLabels)
                    addPhrase(altLabel, uri);
            }
        }
        finally
        {
            // Shut down even if iteration fails, so the native store's
            // file locks are always released.
            repository.shutDown();
            store.shutDown();
        }
    }
    
    /**
     * Loads the stopword list (one word per line, UTF-8) from {@code path}.
     *
     * @param path stopwords file
     * @throws Exception if the file cannot be read
     */
    public void loadStopwords(String path) throws Exception
    {
        stopwords = FileUtils.readLines(new File(path), "UTF-8");
    }
    
    /**
     * Indexes each non-stopword word of {@code phrase}, incrementing its
     * occurrence count and recording {@code uri} as a concept containing it.
     * Parenthesized qualifiers (e.g. "Laser (device)") have their parentheses
     * stripped before splitting. Null or empty phrases are ignored.
     *
     * @param phrase a concept label
     * @param uri    the concept's URI
     */
    public void addPhrase(String phrase, String uri)
    {
        // Guard: some concepts may have a missing or empty label; the
        // original charAt(0) probe would have thrown on an empty string.
        if (phrase == null || phrase.length() == 0)
            return;

        // Handle qualified names and expanded acronyms
        if (phrase.contains(" ("))
        {
            // Literal character removal — no regex needed.
            phrase = phrase.replace("(", "");
            phrase = phrase.replace(")", "");
        }
            
        String[] words = phrase.split(" ");
        for (String word: words)
        {
            if (!stopwords.contains(word))
            {
                Integer count = termCountMap.get(word);
                termCountMap.put(word, (count == null) ? 1 : count + 1);
                
                Bag<String> bag = termConceptMap.get(word);
                if (bag == null)
                {
                    bag = new HashBag<String>();
                    termConceptMap.put(word, bag);
                }
                bag.add(uri);
            }
        }    
    }
}
