package edu.unc.ils.analysis;

import java.io.File;
import java.io.FileWriter;
import java.io.StringReader;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.collections15.Bag;
import org.apache.commons.collections15.bag.TreeBag;
import org.apache.commons.math.stat.Frequency;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.util.Version;
import org.openrdf.concepts.skos.core.Concept;
import org.openrdf.elmo.ElmoModule;
import org.openrdf.elmo.sesame.SesameManager;
import org.openrdf.elmo.sesame.SesameManagerFactory;
import org.openrdf.repository.Repository;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.nativerdf.NativeStore;

import com.aliasi.tokenizer.IndoEuropeanTokenizerFactory;
import com.aliasi.tokenizer.Tokenizer;
import com.aliasi.tokenizer.TokenizerFactory;

/**
 * Computes token-frequency statistics over the preferred and alternate
 * labels of every SKOS concept in a Sesame native triple store, then
 * writes a rank-ordered term-frequency report.
 *
 * Output format (tab-separated): rank, token, term frequency,
 * number of distinct concepts containing the token.
 */
public class VocabularyAnalysis 
{
    static final TokenizerFactory TOKENIZER_FACTORY = IndoEuropeanTokenizerFactory.INSTANCE;
    
    /** Lower-cased, stripped token -> total occurrences across all labels. */
    Map<String, Integer> tokenMap = new HashMap<String, Integer>();
    /** Token -> ids of the concepts whose labels contain it. */
    Map<String, Set<Integer>> conceptMap = new HashMap<String, Set<Integer>>();
    /** Number of concepts visited by getTermFrequencies(). */
    int totalConcepts = 0;
    
    public static void main(String[] args) throws Exception
    {
        VocabularyAnalysis va = new VocabularyAnalysis();
        va.getTermFrequencies("/Users/cwillis/dev/hive/hive-data/mesh/meshStore");
        va.printTermFrequencies("mesh.frequencies.lower");
    }
    
    /**
     * Writes one "rank\ttoken\ttf\tconceptCount" line per token, ordered by
     * descending term frequency, to the given output file.
     *
     * @param output path of the report file to create (overwritten if present)
     * @throws Exception if the file cannot be written
     */
    public void printTermFrequencies(String output) throws Exception
    {
        // Descending order by frequency.  Compare via compareTo, not ==:
        // the original code used == on boxed Integers, which is only
        // reliable for values inside the JVM's small-integer cache.
        Comparator<Map.Entry<String, Integer>> byCountDesc = new Comparator<Map.Entry<String, Integer>>()  
        {  
            public int compare(Map.Entry<String, Integer> entry1, Map.Entry<String, Integer> entry2)  
            {  
                return entry2.getValue().compareTo(entry1.getValue());
            }  
        };  

        List<Map.Entry<String, Integer>> sortedEntries =
            new ArrayList<Map.Entry<String, Integer>>(tokenMap.entrySet());
        Collections.sort(sortedEntries, byCountDesc);
        
        FileWriter fw = new FileWriter(new File(output));
        try {
            int rank = 1;
            for (Map.Entry<String, Integer> entry: sortedEntries)
            {
                int tf = entry.getValue();
                int conceptCount = conceptMap.get(entry.getKey()).size();
                fw.write(rank + "\t" + entry.getKey() + "\t" + tf + "\t" + conceptCount + "\n");
                rank++;
            }
            fw.flush();
        } finally {
            // Release the file handle even if a write fails.
            fw.close();
        }
    }

    /**
     * Opens the Sesame native store at the given path, tokenizes the
     * prefLabel and all altLabels of every concept, and accumulates the
     * counts into tokenMap/conceptMap.  Sets totalConcepts as a side effect.
     *
     * @param vocabularyPath directory of the Sesame NativeStore
     * @throws Exception on repository errors
     */
    public void getTermFrequencies(String vocabularyPath) throws Exception
    {
        String indexes = "spoc,ospc";
        NativeStore store = new NativeStore(new File(vocabularyPath), indexes);
        Repository repository = new SailRepository(store);
        repository.initialize();
        ElmoModule module = new ElmoModule();
        SesameManagerFactory factory = new SesameManagerFactory(module, repository);
        SesameManager manager = factory.createElmoManager(); 
            
        int c = 0;
        for (Concept concept:  manager.findAll(Concept.class))
        {
            // NOTE(review): assumes every concept has a non-null prefLabel;
            // indexPhrase would NPE otherwise -- confirm against the data.
            String prefLabel = concept.getSkosPrefLabel();
            indexPhrase(c, prefLabel);

            Set<String> altLabels = concept.getSkosAltLabels();
            for (String altLabel: altLabels)
            {
                indexPhrase(c, altLabel);
            }
            c++;
        }
        totalConcepts = c;
        repository.shutDown();
        store.shutDown();
    }
    
    /**
     * Normalizes a token (lower-case, alphanumerics only) and, if anything
     * remains, increments its occurrence count and records the concept id.
     *
     * @param conceptId id of the concept whose label produced the token
     * @param token     raw token text from the tokenizer
     */
    public void increment(int conceptId, String token) 
    {        
        String tokenLower = token.toLowerCase();
        // StringBuilder instead of String += in a loop (was O(n^2)).
        StringBuilder sb = new StringBuilder(tokenLower.length());
        for (int i = 0; i < tokenLower.length(); i++) {
            char ch = tokenLower.charAt(i);
            if (Character.isLetterOrDigit(ch))
                sb.append(ch);
        }
        String stripped = sb.toString();
        if (stripped.length() == 0)
            return;     // token was pure punctuation

        Integer count = tokenMap.get(stripped);
        tokenMap.put(stripped, count == null ? 1 : count + 1);
        
        Set<Integer> conceptIds = conceptMap.get(stripped);
        if (conceptIds == null)
        {
            conceptIds = new HashSet<Integer>();
            conceptMap.put(stripped, conceptIds);
        }
        conceptIds.add(conceptId);
    }
    
    /**
     * Tokenizes a phrase with the LingPipe Indo-European tokenizer and
     * feeds each token to increment().
     *
     * @param conceptId id of the concept the phrase belongs to
     * @param phrase    label text to tokenize
     */
    public void indexPhrase(int conceptId, String phrase)
    {
        char[] cs = phrase.toCharArray();
        Tokenizer tokenizer = TOKENIZER_FACTORY.tokenizer(cs, 0, cs.length);
        Iterator<String> iterator = tokenizer.iterator();
        while (iterator.hasNext())
        {
            String token = iterator.next();
            increment(conceptId, token);
        }
    }
}
