package query_understanding.query_understand.tree_generator;

import edu.stanford.nlp.ling.IndexedWord;
import edu.stanford.nlp.semgraph.SemanticGraph;
import edu.stanford.nlp.semgraph.SemanticGraphEdge;
import javafx.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import query_understanding.common.nlp_toolkit.DependencyParser;
import query_understanding.common.nlp_toolkit.RelationEdge;
import query_understanding.common.nlp_toolkit.RelationTriplePath;
import query_understanding.query_language.doc_entity_tree.*;
import query_understanding.query_understand.tree_progress.DocumentQueryProgress;
import query_understanding.query_understand.tree_progress.EntityQueryProgress;
import scala.tools.cmd.gen.AnyVals;
import semantic_compute.entity_linking.schema.EntityLinkingResult;
import semantic_compute.relation_extraction.schema.RelationMatchResult;
import semantic_compute.relation_extraction.yago_matcher.YagoRelationMatcher;

import java.util.*;

/**
 * Extends a generated entity query tree with the "extra" words that were not
 * consumed when the core query chain was built: linked entities, concepts,
 * plain keywords and verb-like relation tokens.
 *
 * <p>The current solution extends by a fixed set of rules; every step is a
 * deterministic action, so one tree never expands into multiple trees.
 *
 * <p>Created by julianzliu on 4/26/2017.
 */
//@Component
public class ExtraWordExtender {

    static final Logger logger = LoggerFactory.getLogger( ExtraWordExtender.class );

    private DocumentQueryProgress progress;
    private EntityQueryProgress entityProgress;
    private CoreChainIdentifier coreChainIdentifier;
    // Result buffer filled by recurrNode()/recurrEdge() during getExtendablePoint().
    private List<RelationTriplePath> relationList;

    // Work queues rebuilt by initQueue() before each extension round.
    private Set<EntityLinkingResult.LinkedEntity> extendableEntities;
    private Set<String> extendableConcepts;
    private Set<IndexedWord> extendableKeywords;
    private Set<IndexedWord> extendableRelations;

    public ExtraWordExtender(DocumentQueryProgress progress, EntityQueryProgress entityProgress){
        this.progress = progress;
        this.entityProgress = entityProgress;
        this.coreChainIdentifier = new CoreChainIdentifier(progress, entityProgress);
        this.coreChainIdentifier.setYagoRelationMatcher( new YagoRelationMatcher() );

        this.extendableEntities = new HashSet<>();
        this.extendableConcepts = new HashSet<>();
        this.extendableKeywords = new HashSet<>();
        this.extendableRelations = new HashSet<>();
    }


    /**
     * Extends every tree in the current priority queue and swaps the queue for
     * the extended versions.
     *
     * @return the shared {@link DocumentQueryProgress} this extender was
     *         constructed with
     */
    public DocumentQueryProgress extendExtraWords(){
        PriorityQueue<EntityQueryTree> treePriorityQueue = this.entityProgress.treePriorityQueue;
        PriorityQueue<EntityQueryTree> newTreeQueue = this.entityProgress.genEmptyPriorityQueue();

        for(EntityQueryTree old_tree: treePriorityQueue){

            System.out.println("\n\n[Extend Tree]................");
            EntityQueryProgress.deepFirstTraversal( old_tree );
            EntityQueryTree new_tree = extendTree( old_tree );
            newTreeQueue.add( new_tree );

        }
        this.entityProgress.treePriorityQueue = newTreeQueue;
        this.entityProgress.extendFinishedQueue();
        return this.progress;
    }


    /***********************************************************
     * Extends a single tree in place. The extension is rule based and every
     * step is deterministic, so one input tree yields exactly one output tree.
     *
     * @param old_tree tree to extend (mutated in place)
     * @return the same tree instance, after extension
     */
    private EntityQueryTree extendTree(EntityQueryTree old_tree){

        initQueue( old_tree );

        extend( old_tree );

        return old_tree;
    }


    /*
     * Scans the tree's unvisited tokens and sorts each one into the matching
     * work queue: linked entity, concept, verb-like relation token, or plain
     * keyword. All queues are cleared first so this can run once per round.
     */
    private void initQueue(EntityQueryTree tree){
        this.extendableEntities.clear();
        this.extendableRelations.clear();
        this.extendableConcepts.clear();
        this.extendableKeywords.clear();
        Set<IndexedWord> unvisited = tree.unVisited;
        for(IndexedWord word : unvisited){
            if( this.entityProgress.wordEntityMap.containsKey( word ) ){
                this.extendableEntities.add( this.entityProgress.wordEntityMap.get( word ) );
            }
            else if( this.entityProgress.wordConceptMap.containsKey( word ) ){
                this.extendableConcepts.add( this.entityProgress.wordConceptMap.get(word) );
            }
            else {
                // Verb-tagged tokens are treated as candidate relations;
                // everything else is a plain keyword.
                if(EntityTreeInitializer.VerbsSet.contains( word.tag() )){
                    this.extendableRelations.add( word );
                }
                else {
                    this.extendableKeywords.add( word );
                }
            }
        }
    }

    /**
     * Dumps the current work queues to stdout and reports whether any of them
     * still holds something to extend.
     */
    private boolean checkNeedToExtend(){
        System.out.println("\n\n[checkNeedToExtend]---------------------------");
        for(EntityLinkingResult.LinkedEntity entity:this.extendableEntities){
            System.out.println("[entity] " + entity.entity + " " + entity.name);
        }
        for(String concept: this.extendableConcepts){
            System.out.println("[concept] " + concept);
        }
        for(IndexedWord keyword: this.extendableKeywords){
            System.out.println("[keyword] " + keyword);
        }
        for(IndexedWord relation : this.extendableRelations){
            System.out.println("[relation] " + relation);
        }
        return this.extendableEntities.size() > 0 || this.extendableConcepts.size() > 0
                || this.extendableKeywords.size() > 0 || this.extendableRelations.size() > 0;
    }


    /**
     * Walks the already-built query tree (depth first from the topic entity)
     * and returns the relation paths found at the FIRST tree node that still
     * has unvisited dependency-parse paths leading out of it.
     *
     * @param tree               the query tree to inspect
     * @param visitedParserNodes dependency-parse tokens already consumed
     * @param visitedParserEdges dependency-parse edges already consumed
     * @return relation paths from the first extendable node; empty if none
     */
    public List<RelationTriplePath> getExtendablePoint( EntityQueryTree tree , Set<IndexedWord> visitedParserNodes, Set<SemanticGraphEdge> visitedParserEdges){

        this.relationList = new ArrayList<>();

        Set<QueryNode.BaseNode> visitedNodes = new HashSet<>();
        Set<QueryEdge.BaseEdge> visitedEdges = new HashSet<>();
        recurrNode(tree, tree.topicEntity, visitedNodes, visitedEdges, visitedParserNodes, visitedParserEdges);

        return this.relationList;
    }

    /**
     * Node half of the node/edge mutual recursion. Stops the whole traversal
     * as soon as {@code relationList} is non-empty (first hit wins).
     */
    private void recurrNode(EntityQueryTree tree, QueryNode.BaseNode root, Set<QueryNode.BaseNode> visitedNodes, Set<QueryEdge.BaseEdge> visitedEdges,
                            Set<IndexedWord> visitedParserNodes, Set<SemanticGraphEdge> visitedParserEdges){
        if( relationList.size() > 0){
            return;
        }
        visitedNodes.add( root );
        IndexedWord current = tree.eleTokenMap.get( root ).mainToken;
        System.out.println("\n\n[getExtendablePoint] start from " + current + "\n");
        List<RelationTriplePath> extractRelations = extractAllPathFromDependencyTree(tree, current, visitedParserNodes, visitedParserEdges);
        if(extractRelations.size() > 0){
            relationList.addAll( extractRelations );
            return;
        }
        for(QueryEdge.BaseEdge edge: root.edgesSet ){
            if(visitedEdges.contains( edge )) continue;
            recurrEdge(tree, edge, visitedNodes, visitedEdges, visitedParserNodes, visitedParserEdges);
        }
    }

    /**
     * Edge half of the node/edge mutual recursion: descends into each
     * not-yet-visited node attached to {@code edge}.
     */
    private void recurrEdge(EntityQueryTree tree, QueryEdge.BaseEdge edge, Set<QueryNode.BaseNode> visitedNodes, Set<QueryEdge.BaseEdge> visitedEdges,
                             Set<IndexedWord> visitedParserNodes, Set<SemanticGraphEdge> visitedParserEdges){
        if(relationList.size() > 0){
            return;
        }
        visitedEdges.add(edge);
        for(QueryNode.BaseNode root: edge.nodesSet ){
            if(visitedNodes.contains(root)) continue;
            recurrNode(tree, root, visitedNodes, visitedEdges, visitedParserNodes, visitedParserEdges);
        }
    }

    /**
     * Main extension loop: while any work queue is non-empty, try one
     * extension step per round, re-classifying the tree's unvisited tokens
     * after every successful step. Breaks out (with an error message) when a
     * round makes no progress, to avoid spinning forever.
     */
    private void extend(EntityQueryTree old_tree){

        Set<IndexedWord> visitedNodes = old_tree.visited;

        Set<SemanticGraphEdge> visitedEdges = old_tree.visitedEdges ;

        int round = 0;

        while ( checkNeedToExtend() ){

            boolean success = false;

            /*
             * Strategy: first extend outward from the already-built tree,
             * until its nodes can no longer reach an extendable relation;
             * only then fall back to the remaining isolated node types.
             */

            if(this.extendableEntities.size() > 0){
                EntityLinkingResult.LinkedEntity entity = this.extendableEntities.iterator().next();
                if( extendEntities( old_tree, entity, visitedNodes, visitedEdges) ){

                    this.extendableEntities.remove( entity );

                    success = true;
                }

            }

            if(!success){
                List<RelationTriplePath> relations = getExtendablePoint( old_tree, visitedNodes, visitedEdges );
                System.out.println("\n\n[getExtendablePoint] size = " + relations.size() + "\n\n");
                if (relations.size() > 0){
                    success = extendRelationPath(old_tree, relations);
                }
            }

            // TODO: extension from concepts, keywords and relation tokens is
            // not implemented yet; the branches are intentionally empty.
            if(!success && this.extendableConcepts.size() > 0 ){

            }

            if( !success && this.extendableKeywords.size() > 0 ) {

            }

            if(!success && this.extendableRelations.size() > 0){

            }

            if(!success){
                // No progress this round: bail out instead of looping forever.
                System.out.print("\n\n\n\n" + "Extend Error! Round = " + round  + "\n\n\n\n");
                break;
            }

            round += 1;

            initQueue(old_tree);

        }
    }


    /**
     * Tries to graft the first usable length-1 relation path onto the tree.
     * Length-2 paths are skipped here (only {@code path.size() == 1} handled).
     *
     * @return true if an edge was grown
     */
    private boolean extendRelationPath(EntityQueryTree tree, List<RelationTriplePath> relations){
        System.out.println("[extendRelationPath] ..............");
        boolean isGrowed = false;
        for(RelationTriplePath relationPath: relations){
            if(isGrowed) break;
            if(relationPath.path.size() != 1) continue;
            String relation_str = this.coreChainIdentifier.genRelationStr( tree, relationPath.path.get(0));
            // 1) candidate relations from the relation's textual form
            List<Pair<String,Long>> canditateRelations = this.coreChainIdentifier.getCanditateRelations(relation_str);
            // 2) filter down to relations that exist in the knowledge graph
            List<String> edges = this.coreChainIdentifier.getCanditateEdges(canditateRelations);
            Set<String> matchedRelations = new HashSet<>( edges );
            IndexedWord connectToken = relationPath.path.get(0).linkPoint;
            QueryNode.BaseNode connectNode = (QueryNode.BaseNode)(tree.nodeInvertMap.get( connectToken ).queryElement);
            extendLengthOneTree( tree, connectNode, relationPath, matchedRelations);
            isGrowed = true;
        }
        return isGrowed;
    }


    /**
     * Attaches a single new edge to {@code connectNode}: a YAGO edge when a
     * knowledge-graph relation matched, otherwise a generic occur-with edge.
     * The far-side node is created by the core-chain identifier.
     *
     * @return always true (an edge of one kind or the other is always added)
     */
    private boolean extendLengthOneTree(EntityQueryTree new_tree, QueryNode.BaseNode connectNode, RelationTriplePath relationPath, Set<String> matchedRelations){
        String yagoPath = this.coreChainIdentifier.selectBestLengthOneEdge(matchedRelations);
        System.out.println("[extendLengthOneTree] yagoPath = " + yagoPath);
        System.out.println("[extendLengthOneTree] connectNode = " + connectNode.nodeType );
        RelationEdge.BaseRelation relation = relationPath.path.get(0);


        if(yagoPath == null){
            // No KG match: fall back to an occur-with edge.
            QueryEdge.OccurEdge edge1 = new QueryEdge.OccurEdge();
            edge1.text = relation.relation.word();
            edge1.mainToken = relation.relation;
            new_tree.addIndexWord( edge1.mainToken, edge1 );
            edge1.nodesSet.add( connectNode );
            connectNode.edgesSet.add( edge1 );

            this.coreChainIdentifier.addOtherSideNode(new_tree, relation, edge1);

            System.out.println("[extendLengthOneTree] successfully grow a Occur edge!\n\n");

        }
        else{
            QueryEdge.YagoEdge edge1 = new QueryEdge.YagoEdge();
            edge1.mainToken = relation.relation;
            new_tree.addIndexWord( edge1.mainToken, edge1 );
            edge1.yago_label = yagoPath;
            edge1.nodesSet.add( connectNode );
            connectNode.edgesSet.add( edge1 );

            this.coreChainIdentifier.addOtherSideNode(new_tree, relation, edge1);

            System.out.println("[extendLengthOneTree] successfully grow a yago edge!\n\n");

        }
        new_tree.addRelation( relation );
        return true;
    }


    /**********************
     * Connects a linked entity to the tree: extracts all dependency paths from
     * the entity's token, matches length-1 and length-2 paths against the
     * knowledge graph, and grafts the first path whose far side already sits
     * in the tree. Length-1 matches are preferred over length-2.
     *
     * @param tree         tree to grow (mutated in place)
     * @param entity       linked entity to attach
     * @param visitedNodes dependency tokens already consumed (entity token is
     *                     added on success)
     * @param visitedEdges dependency edges already consumed
     * @return true if the entity was attached
     */
    private boolean extendEntities(EntityQueryTree tree, EntityLinkingResult.LinkedEntity entity, Set<IndexedWord> visitedNodes, Set<SemanticGraphEdge> visitedEdges){
        IndexedWord current = this.entityProgress.entityTokenMap.get(entity).mainToken;
        List<RelationTriplePath> relations = extractAllPathFromDependencyTree( tree, current, visitedNodes, visitedEdges);

        Map< RelationTriplePath, Map<String,Set<String>> >  lengthTwoRelations  = new HashMap<>();
        Map< RelationTriplePath, Set<String> >  lengthOneRelations  = new HashMap<>();
        for(RelationTriplePath relationPath : relations){
            if(relationPath.path.size() == 2) {
                // a) textual representation of each hop
                String relation_str1 = this.coreChainIdentifier.genRelationStr(tree, relationPath.path.get(0));
                String relation_str2 = this.coreChainIdentifier.genRelationStr(tree, relationPath.path.get(1));

                // b) match against the knowledge graph
                List<RelationMatchResult> results = this.coreChainIdentifier.matchLengthTwoRelation(entity.entity, relation_str1, relation_str2);
                Map<String, Set<String>> yagoRelations = this.coreChainIdentifier.deleteDuplicatedLengthTwoResult(results);

                lengthTwoRelations.put(relationPath, yagoRelations);
            }
            if(relationPath.path.size() == 1){
                // a) textual representation
                String relation_str = this.coreChainIdentifier.genRelationStr( tree, relationPath.path.get(0));
                // b) match against the knowledge graph
                List<RelationMatchResult> results = this.coreChainIdentifier.matchLengthOneRelation( entity.entity, relation_str);
                Set<String> yagoRelations = this.coreChainIdentifier.deleteDuplicatedLengthOneResult(results);

                lengthOneRelations.put( relationPath, yagoRelations );
            }
        }


        boolean success = false;
        if(lengthOneRelations.size() > 0){
            for(RelationTriplePath relationPath: lengthOneRelations.keySet()){
                if(success) continue;
                IndexedWord otherside = relationPath.path.get(0).otherSide;
                // Attach only when the far side is already a node in the tree.
                System.out.println("[otherSide = ] " +  otherside);
                if( tree.nodeInvertMap.containsKey( otherside ) ) {
                    QueryNode.BaseNode connectNode = (QueryNode.BaseNode)(tree.nodeInvertMap.get( otherside ).queryElement);
                    System.out.println("[otherSide = ] " +  otherside + "  ,  connectNode  = " + connectNode + " hash = " + connectNode.hashCode() );
                    Set<String> matchedRelations = lengthOneRelations.get( relationPath );
                    growLengthOneTree(tree, entity, connectNode, relationPath, matchedRelations );
                    success = true;
                }
            }
        }
        if(!success && lengthTwoRelations.size() > 0){
            // BUG FIX: this loop previously iterated lengthOneRelations.keySet(),
            // so lengthTwoRelations.get(relationPath) returned null and
            // path.get(1) failed on length-1 paths.
            for(RelationTriplePath relationPath: lengthTwoRelations.keySet()){
                if(success) continue;
                IndexedWord otherside = relationPath.path.get(1).otherSide;
                // Attach only when the far side is already a node in the tree.
                System.out.println("[otherSide = ] " +  otherside);
                if( tree.nodeInvertMap.containsKey( otherside ) ) {
                    QueryNode.BaseNode connectNode = (QueryNode.BaseNode)(tree.nodeInvertMap.get( otherside ).queryElement);
                    System.out.println("[otherSide = ] " +  otherside + "  ,  connectNode = " + connectNode );
                    Map<String,Set<String>>  matchedRelations = lengthTwoRelations.get( relationPath );
                    success = growLengthTwoTree(tree, entity, connectNode, relationPath, matchedRelations );
                }
            }
        }
        if(success)
            visitedNodes.add( current );
        return success;
    }


    /**
     * Grows a new entity node plus one edge (YAGO edge when matched, otherwise
     * occur-with) between the new entity and an existing tree node.
     *
     * @return always true
     */
    private boolean growLengthOneTree(EntityQueryTree new_tree, EntityLinkingResult.LinkedEntity mention, QueryNode.BaseNode connectNode, RelationTriplePath relationPath, Set<String> matchedRelations){
        String yagoPath = this.coreChainIdentifier.selectBestLengthOneEdge(matchedRelations);
        System.out.println("[growLengthOneTree] yagoPath = " + yagoPath);
        System.out.println("[growLengthOneTree] connectNode = " + connectNode.nodeType );
        RelationEdge.BaseRelation relation = relationPath.path.get(0);

        QueryNodeTokenMap nodeTokenMap = this.entityProgress.entityTokenMap.get(mention);

        // Build the entity node for the mention being attached.
        QueryNode.EntityNode topicNode = new QueryNode.EntityNode();
        topicNode.entity = mention;
        topicNode.mainToken = nodeTokenMap.mainToken;
        topicNode.allTokens = nodeTokenMap.tokenSet;
        topicNode.yago_id = this.progress.entityLinkingResult.entityMetadata.get( mention.entity ).YAGO_ID;
        topicNode.yago_label = topicNode.yago_id;
        topicNode.score = mention.score;

        nodeTokenMap.queryElement = topicNode;
        new_tree.addQueryNodeTokenMapOfNode( nodeTokenMap );
        new_tree.treeScore += topicNode.score;

        if(yagoPath == null){
            // No KG match: connect via an occur-with edge.
            QueryEdge.OccurEdge edge1 = new QueryEdge.OccurEdge();
            edge1.text = relation.relation.word();
            edge1.mainToken = relation.relation;
            new_tree.addIndexWord( edge1.mainToken, edge1 );
            edge1.nodesSet.add( topicNode );
            topicNode.edgesSet.add( edge1 );

            edge1.nodesSet.add( connectNode );
            connectNode.edgesSet.add( edge1 );


            System.out.println("[growLengthOneTree] successfully grow a Occur edge!\n\n");

        }
        else{
            QueryEdge.YagoEdge edge1 = new QueryEdge.YagoEdge();
            edge1.mainToken = relation.relation;
            new_tree.addIndexWord( edge1.mainToken, edge1 );
            edge1.yago_label = yagoPath;
            edge1.nodesSet.add( topicNode );
            topicNode.edgesSet.add( edge1 );

            edge1.nodesSet.add( connectNode );
            connectNode.edgesSet.add( edge1 );

            System.out.println("[growLengthOneTree] successfully grow a yago edge!\n\n");

        }
        new_tree.addRelation( relation );
        return true;
    }


    /**
     * Grows a two-hop chain: new entity node — edge1 — intermediate node —
     * edge2 — existing tree node. Fails (returns false) when no length-two
     * YAGO path was selected.
     *
     * @return true on success, false when no YAGO path is available
     */
    private boolean growLengthTwoTree(EntityQueryTree new_tree, EntityLinkingResult.LinkedEntity mention, QueryNode.BaseNode connectNode, RelationTriplePath relationPath, Map<String,Set<String>> matchedRelations){
        Pair<String,String> yagoPath = this.coreChainIdentifier.selectBestLengthTwoEdge( matchedRelations );
        if(yagoPath == null){
            return false;
        }
        else{
            QueryNodeTokenMap nodeTokenMap = this.entityProgress.entityTokenMap.get(mention);

            // Build the entity node for the mention being attached.
            QueryNode.EntityNode topicNode = new QueryNode.EntityNode();
            topicNode.entity = mention;
            topicNode.mainToken = nodeTokenMap.mainToken;
            topicNode.allTokens = nodeTokenMap.tokenSet;
            topicNode.yago_id = this.progress.entityLinkingResult.entityMetadata.get( mention.entity ).YAGO_ID;
            topicNode.yago_label = topicNode.yago_id;
            topicNode.score = mention.score;

            new_tree.addIndexWord( topicNode.mainToken, topicNode);
            new_tree.treeScore += topicNode.score;



            System.out.println("[growLengthTwoTree] yagoPath = " + yagoPath.toString());
            QueryNode.EntityNode root = topicNode;

            // First hop: new entity -> intermediate node.
            QueryEdge.YagoEdge edge1 = new QueryEdge.YagoEdge();
            edge1.nodesSet.add( root );
            edge1.yago_label = yagoPath.getKey();
            edge1.mainToken = relationPath.path.get(0).relation;
            new_tree.addRelation( relationPath.path.get(0) );
            // TODO: should add all tokens
            new_tree.addIndexWord( edge1.mainToken, edge1 );
            root.edgesSet.add( edge1 );

            QueryNode.BaseNode node1 = this.coreChainIdentifier.addOtherSideNode(new_tree, relationPath.path.get(0), edge1);

            // Second hop: intermediate node -> existing tree node.
            QueryEdge.YagoEdge edge2 = new QueryEdge.YagoEdge();
            edge2.yago_id = null;
            edge2.yago_label = yagoPath.getValue();
            edge2.mainToken = relationPath.path.get(1).relation;
            new_tree.addRelation( relationPath.path.get(1) );
            // TODO: should add all tokens
            new_tree.addIndexWord( edge2.mainToken, edge2 );
            edge2.nodesSet.add( node1 );
            node1.edgesSet.add(edge2);


            connectNode.edgesSet.add( edge2 );
            edge2.nodesSet.add( connectNode );

            System.out.println("[growLengthTwoTree] successfully grow a tree! Two Edge\n\n");

            return true;
        }
    }


    /**
     * Extracts all dependency-parse paths starting at {@code current}, using
     * copies of the visited sets so the caller's sets are not mutated.
     */
    private List<RelationTriplePath> extractAllPathFromDependencyTree(EntityQueryTree entityQueryTree, IndexedWord current, Set<IndexedWord> visitedNodes, Set<SemanticGraphEdge> visitedEdges){
        SemanticGraph dependencies = this.entityProgress.getDependencies();

        // Defensive copies: extractAllPossiblePath may mutate these sets.
        Set<IndexedWord> parserVisitedNodes = new HashSet<>( visitedNodes );

        Set<SemanticGraphEdge> parserVisitedEdges = new HashSet<>( visitedEdges );
        List<RelationTriplePath> relations = DependencyParser.extractAllPossiblePath( dependencies, current, parserVisitedNodes, parserVisitedEdges);
        System.out.println("[extractAllPathFromDependencyTree] size = " + relations.size());
        for(RelationTriplePath relationTriplePath: relations){
            System.out.println("[relationTriplePath] " + relationTriplePath.toString() );
        }
        return relations;
    }





}
