package query_understanding.query_understand.tree_generator;

import com.ambiverse.api.model.Entity;
import com.orientechnologies.orient.core.metadata.security.OSystemUser;
import common.helper.toolkit.FileToolkit;
import edu.stanford.nlp.ling.IndexedWord;
import edu.stanford.nlp.semgraph.SemanticGraph;
import edu.stanford.nlp.semgraph.SemanticGraphEdge;
import javafx.util.Pair;
import org.springframework.stereotype.Component;
import query_understanding.common.nlp_toolkit.RelationPath;
import query_understanding.query_language.doc_entity_tree.EntityQueryTree;
import query_understanding.query_understand.tree_progress.DocumentQueryProgress;
import query_understanding.query_understand.tree_progress.EntityQueryProgress;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;

/**
 * Created by julianzliu on 4/25/2017.
 */
//@Component
public class EntityTreeInitializer {
    /*
    Stop-word list sources:
    1) http://www.ranks.nl/stopwords
    2) https://github.com/stanfordnlp/CoreNLP/blob/master/data/edu/stanford/nlp/patterns/surface/stopwords.txt
     */

    private DocumentQueryProgress progress;
    private EntityQueryProgress entityProgress;
    private Set<String> stopWords;

    /*
    Penn Treebank POS tags for nouns and verbs:
    https://gist.github.com/nlothian/9240750
     */
    private static String[] Nouns = {"NN", "NNS", "NNP", "NNPS"};
    public static Set<String> NounsSet = new HashSet<>( Arrays.asList(Nouns));
    private static String[] Verbs  = {"VB", "VBD", "VBG", "VBN", "VBP", "VBZ"};
    public static Set<String> VerbsSet = new HashSet<>( Arrays.asList(Verbs) );

    /**
     * Builds an initializer bound to a document/entity progress pair and
     * eagerly loads the stop-word list from the classpath.
     *
     * @param progress       overall document-level query progress
     * @param entityProgress entity-level progress; supplies the dependency
     *                       graph and receives the generated initial tree
     */
    public EntityTreeInitializer(DocumentQueryProgress progress, EntityQueryProgress entityProgress){
        this.progress = progress;
        this.entityProgress = entityProgress;
        this.stopWords = loadStopWords();
    }

    /**
     * No-arg constructor: leaves progress/entityProgress/stopWords unset.
     * Only safe for uses that don't touch those fields (e.g. calling
     * {@link #loadStopWords()} directly, as {@code main} does).
     */
    public EntityTreeInitializer(){

    }

    /**
     * Creates a fresh {@link EntityQueryTree} on {@code entityProgress},
     * populates its word sets from the dependency graph, then drops
     * stop-words from the unvisited set.
     *
     * NOTE(review): {@link #filterNoneNounAndVerb()} exists but is never
     * called here — confirm whether the POS filter step is intentionally
     * disabled before wiring it in.
     */
    public void genInitialTree(){
        EntityQueryTree initTree = new EntityQueryTree();
        this.entityProgress.initTree = initTree;

        genUnvisitedSet();

        filterStopWords();

    }

    /**
     * Removes from the unvisited set every word whose POS tag is neither a
     * noun nor a verb tag.
     *
     * Bug fix: the original removed elements from the set while iterating it
     * with a for-each loop, which throws ConcurrentModificationException on
     * the first removal. {@code removeIf} performs the same filtering safely.
     */
    private void filterNoneNounAndVerb(){
        Set<IndexedWord> unVi = this.entityProgress.initTree.unVisited;
        unVi.removeIf(word -> !NounsSet.contains(word.tag()) && !VerbsSet.contains(word.tag()));
    }


    /**
     * Removes stop-words from the unvisited set. Uses {@code removeIf} to
     * avoid mutating the set during iteration (the original collected
     * removals into a temporary set for the same reason).
     */
    private void filterStopWords() {
        Set<IndexedWord> unVi = this.entityProgress.initTree.unVisited;
        unVi.removeIf(word -> this.stopWords.contains(word.word()));
    }

    /**
     * Walks the whole dependency graph depth-first from its first root,
     * collecting every reachable word into {@code initTree.allWords}, then
     * seeds {@code initTree.unVisited} as a copy of that set.
     */
    private void genUnvisitedSet(){
        Set<IndexedWord> allWords = this.entityProgress.initTree.allWords;
        SemanticGraph dependencies = this.entityProgress.getDependencies();
        Set<IndexedWord> visited = new HashSet<>();
        IndexedWord root = dependencies.getFirstRoot();
        deepFirstTraversal(dependencies, root, visited, allWords);
        this.entityProgress.initTree.unVisited = new HashSet<>( allWords );
    }

    /**
     * Recursive depth-first traversal over the (undirected view of the)
     * dependency graph: follows both incoming and outgoing edges so that
     * every node connected to {@code current} is reached exactly once.
     *
     * @param dependencies the semantic graph being traversed
     * @param current      node being expanded
     * @param visited      cycle guard; every expanded node is added here
     * @param allWords     accumulator for every node reached
     */
    private static void deepFirstTraversal(SemanticGraph dependencies, IndexedWord current, Set<IndexedWord> visited, Set<IndexedWord> allWords){
        allWords.add(current);
        visited.add(current);
        // (debug print of each visited node removed)

        List<SemanticGraphEdge> inEdgesSorted = dependencies.getIncomingEdgesSorted(current);
        for (SemanticGraphEdge edge : inEdgesSorted) {
            IndexedWord source = edge.getSource();
            if(visited.contains(source))
                continue;
            deepFirstTraversal(dependencies, source, visited, allWords);
        }
        List<SemanticGraphEdge> outEdgesSorted = dependencies.getOutEdgesSorted(current);
        for (SemanticGraphEdge edge : outEdgesSorted) {
            IndexedWord target = edge.getTarget();
            if(visited.contains(target))
                continue;
            deepFirstTraversal(dependencies, target,  visited, allWords);
        }
    }


    /**
     * Legacy resource-loading variant (kept for reference, unused).
     * Reads one stop-word per line from {@code stopwords.txt} on the
     * classpath via a {@code File} handle.
     * https://www.mkyong.com/java/java-read-a-file-from-resources-folder/
     *
     * NOTE(review): {@code getResource(fileName)} returns null when the
     * resource is missing, which would NPE here; prefer
     * {@link #loadStopWords()}.
     *
     * @return the set of stop-words, empty if the file could not be read
     * @deprecated superseded by {@link #loadStopWords()}
     */
    @Deprecated
    private Set<String> loadStopWordsOld(){
        String fileName = "stopwords.txt";

        Set<String> stopwords = new HashSet<>();

        // Resolve the file from the resources folder.
        ClassLoader classLoader = getClass().getClassLoader();
        File file = new File(classLoader.getResource(fileName).getFile());

        // Explicit UTF-8 so the result does not depend on the platform charset.
        try (Scanner scanner = new Scanner(file, "UTF-8")) {
            while (scanner.hasNextLine()) {
                stopwords.add(scanner.nextLine());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return stopwords;
    }


    /**
     * Loads the stop-word list from {@code /stopwords.txt} on the classpath,
     * one word per line.
     *
     * Fixes over the original: the stream itself is now managed by
     * try-with-resources (it was closed manually, leaking on failure); a
     * missing resource is reported instead of surfacing as a swallowed NPE;
     * the charset is pinned to UTF-8 instead of the platform default; the
     * redundant {@code scanner.close()} and debug print were removed.
     *
     * @return the set of stop-words; empty if the resource is missing or
     *         unreadable
     */
    public Set<String> loadStopWords()
    {
        String fileName = "/stopwords.txt";

        Set<String> stopwords = new HashSet<>();

        try (InputStream in = EntityTreeInitializer.class.getResourceAsStream(fileName)) {
            if (in == null) {
                // Resource missing from the classpath; return an empty set
                // rather than NPE-ing inside the Scanner constructor.
                System.err.println("stopwords resource not found: " + fileName);
                return stopwords;
            }
            try (Scanner scanner = new Scanner(in, "UTF-8")) {
                while (scanner.hasNextLine()) {
                    stopwords.add(scanner.nextLine());
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return stopwords;
    }


    /********************************************************************************************
     * Ad-hoc smoke test: loads and prints the stop-word list.
     *
     * @param args unused
     */
    public static void main(String[] args){
        Set<String> stopwords = new EntityTreeInitializer().loadStopWords();
        for(String stop: stopwords){
            System.out.println(stop);
        }
    }




}
