package semantic_compute.entity_linking.schema;

import algorithm.config.HumanLanguage;
import algorithm.nlp.corenlp.TextAnnotator;
import common.helper.json.JavaToJsonString;
import edu.stanford.nlp.coref.CorefCoreAnnotations;
import edu.stanford.nlp.coref.data.CorefChain;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.util.CoreMap;
import edu.stanford.nlp.util.IntPair;
import knowlwdge_graph.knowledge_etl.entity_etl.EntityExtractor;

import java.util.*;

/**
 * Result of entity linking over a text: the linked mentions, per-entity
 * metadata, and an inverted index from (sentence, last-token-of-mention)
 * positions to the set of entity ids mentioned there. Coreference chains
 * are used to propagate an entity id to all co-referring mentions.
 *
 * Created by julianzliu on 4/19/2017.
 */
public class EntityLinkingResult {

    public String originalText;
    public String annotatedText;


    // All entity mentions found in originalText.
    public List<LinkedEntity> mentions;
    // Mention key -> metadata of the linked knowledge-base entity.
    public Map<String,EntityMetadata> entityMetadata;


    // Inverted index: sentence index (1-based, see alignWithCoreNlp)
    //   -> (1-based index of the LAST token of a mention -> entity ids there).
    public Map<Integer, Map<Integer,Set<String>>> sentenceTokenEntitySet;


    public EntityLinkingResult(){
        this.originalText = null;
        this.annotatedText = null;
        this.sentenceTokenEntitySet = new HashMap<>();
    }




    /** One linked entity mention inside the text. */
    public static class LinkedEntity{

        public Integer sentIndex;    // sentence index, 0-based (CoreNLP token.sentIndex())
        public Integer startIndex;   // first token of the mention, 1-based, inclusive
        public Integer endIndex;     // last token of the mention, 1-based, inclusive

        public Integer offset;       // character offset of the mention in originalText
        public Integer length;       // character length of the mention

        public String name;
        public String entity;        // key into EntityLinkingResult.entityMetadata

        public Double score;         // linking confidence score
    }

    /** Knowledge-base metadata for a linked entity. */
    public static class EntityMetadata{
        public String knowledgebase;
        public String entityId;
        public Double importance;
        public String url;
        public String YAGO_ID;
        public String WIKIDATA_ID;
    }


    /**
     * Annotates {@link #originalText} with CoreNLP and aligns the linking
     * result against the produced annotation.
     *
     * @return false when there is no linking result to align, true otherwise
     */
    public boolean alignWithCoreNlp(){
        TextAnnotator textAnnotator = new TextAnnotator(HumanLanguage.ENGLISH);
        Annotation document = textAnnotator.getAnnotatedText( this.originalText );
        textAnnotator.pipeline.prettyPrint(document, System.out);
        System.out.println("\n\n[alignWithCoreNlp] genarated document ----------------  \n\n");
        return alignWithCoreNlp(document);
    }


    /**
     * Aligns each mention's character span (offset, length) with CoreNLP
     * token positions (sentence id, token id) and fills the inverted index
     * {@link #sentenceTokenEntitySet}. When a mention is covered by a
     * coreference chain, its entity id is propagated to every mention of
     * that chain as well.
     *
     * Sentence keys of the inverted index are 1-based; token keys are the
     * 1-based CoreNLP index of the LAST token of a mention.
     *
     * @param document a CoreNLP annotation of {@link #originalText}
     * @return false when called before a linking result is available
     */
    public boolean alignWithCoreNlp(Annotation document){
        if(this.originalText == null || this.mentions == null){
            System.out.println("alignWithCoreNlp must be called after get Linking Result");
            return false;
        }
        // Character-offset lookups: mention start / end offset -> mention.
        Map<Integer,LinkedEntity> beginEntityMap = new HashMap<>();
        Map<Integer,LinkedEntity> endEntityMap = new HashMap<>();
        for(LinkedEntity entity: this.mentions){
            beginEntityMap.put(entity.offset, entity);
            endEntityMap.put(entity.offset + entity.length, entity);
        }
        /*
         * Alignment performed below:
         * (offset, length)  ==>  (sentenceID, tokenID)
         */
        Integer beginIndex = 0;
        Integer endIndex = 0;

        List<CoreMap> sentences = document.get(CoreAnnotations.SentencesAnnotation.class);
        // May be null when the pipeline does not include a coref annotator.
        Map<Integer, CorefChain> corefs = document.get(CorefCoreAnnotations.CorefChainAnnotation.class);


        Integer sentIndex = 1; // sentence keys are 1-based


        // The coref cluster closest to (at or before) the current scan position.
        Integer corefClustId = null;
        Integer corefTokenId = null;


        for(CoreMap sentence: sentences) {
            Map<Integer,Set<String>> tokenEntitySet =
                    sentenceTokenEntitySet.computeIfAbsent(sentIndex, k -> new HashMap<>());

            for (CoreLabel token : sentence.get(CoreAnnotations.TokensAnnotation.class)) {
                System.out.println(token.word() + "\t" + "[" + token.beginPosition() + "," + token.endPosition()+ "]");
                ///////////////////////////////////////////
                // Remember the most recent coreference cluster seen.
                Integer corefTmp = token.get(CorefCoreAnnotations.CorefClusterIdAnnotation.class);
                if(corefTmp != null){
                    corefClustId = corefTmp;
                    corefTokenId = token.index();
                }

                /////////////////////////////////////////////
                // Match mention character spans against token character spans.
                Integer begin = token.beginPosition();
                Integer end = token.endPosition();
                if(beginEntityMap.containsKey( begin )){
                    beginIndex = token.index();
                }
                if(endEntityMap.containsKey(end)){
                    System.out.println("\n\n[alignWithCoreNlp] Find A Entity Linking--------------");
                    // A mention ends at this token: record its token span.
                    endIndex = token.index();
                    LinkedEntity entity = endEntityMap.get(end);
                    entity.endIndex = endIndex;
                    entity.startIndex = beginIndex;
                    entity.sentIndex = token.sentIndex(); // 0-based, per LinkedEntity contract
                    System.out.println("startIndex = " + beginIndex + " , endIndex = " + endIndex + " , sentIndex = " + token.sentIndex());

                    // Build the inverted index, preferring the YAGO id when known.
                    String entityId = entity.entity;
                    if(this.entityMetadata != null && this.entityMetadata.containsKey(entity.entity))
                        entityId = this.entityMetadata.get(entity.entity).YAGO_ID;
                    tokenEntitySet.computeIfAbsent(endIndex, k -> new HashSet<>()).add(entityId);

                    // Propagate the entity to every mention of the covering
                    // coreference chain, when the chain's token falls inside
                    // the current mention's span.
                    if(corefs != null && corefClustId != null && corefTokenId >= beginIndex){
                        CorefChain chain = corefs.get(corefClustId);
                        if(chain != null)
                            addEntityToSentenceTokenEntitySet(entityId, chain);
                    }
                    // Reset so a stale cluster is not reused by the next mention.
                    corefClustId = null;
                    System.out.println("\n\n");

                }
            }
            sentIndex += 1;
        }
        return true;
    }


    /**
     * Adds {@code entityId} to the inverted index at every mention of the
     * given coreference chain.
     *
     * BUGFIX: {@code CorefMention.sentNum} is already 1-based — the same
     * convention as the sentence keys written by {@link #alignWithCoreNlp} —
     * so it is used directly; the previous {@code sentNum - 1} wrote 0-based
     * keys and filed coref-propagated entities under the wrong sentence.
     * {@code CorefMention.endIndex} is exclusive, so {@code endIndex - 1} is
     * the 1-based index of the mention's last token.
     */
    private void addEntityToSentenceTokenEntitySet(String entityId, CorefChain chain){
        System.out.println("\n\n[Find A Coref Chain] ------------------------");
        Map<IntPair, Set<CorefChain.CorefMention>> mentionMap = chain.getMentionMap();
        for(Set<CorefChain.CorefMention> mentionSet : mentionMap.values()){
            for(CorefChain.CorefMention mention : mentionSet){

                Integer tokenEndIndex = mention.endIndex - 1;
                Integer sentenceIndex = mention.sentNum; // 1-based, consistent with alignWithCoreNlp
                System.out.println("mention = " + mention + " , sentIndex = " + sentenceIndex + " , tokenEndIndex = " + tokenEndIndex);
                sentenceTokenEntitySet
                        .computeIfAbsent(sentenceIndex, k -> new HashMap<>())
                        .computeIfAbsent(tokenEndIndex, k -> new HashSet<>())
                        .add(entityId);
            }
        }
    }




    /**
     * For every linked entity, imports the surrounding knowledge-graph
     * triples (everything within two hops of the entity) from Postgres
     * into OrientDB, keyed by the entity's YAGO id.
     */
    public void extractLinkedEntities(){
        EntityExtractor entityExtractor = new EntityExtractor();
        if(this.entityMetadata == null)
            return; // nothing linked yet
        for(EntityMetadata metadata : this.entityMetadata.values()){
            entityExtractor.importEntityFromPostgreToOrient( metadata.YAGO_ID );
        }
    }


    /*********************************************************************
     *
     * @return this result serialized as JSON
     */
    @Override
    public String toString(){
        return JavaToJsonString.mapJavaToJson( this );
    }


    /*************************************************
     * Smoke test: annotate a sample sentence and run alignment
     * (no mentions are set, so alignment returns false immediately).
     */
    public static void main(String[] args){
        String text = "Obama was born in Hawaii.";
        TextAnnotator textAnnotator = new TextAnnotator(HumanLanguage.ENGLISH);
        Annotation document = textAnnotator.getAnnotatedText( text );

        EntityLinkingResult result = new EntityLinkingResult();
        result.alignWithCoreNlp(document);
    }
}
