package textprocessing.datasources.enwikipedia;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import common.dao.elastic.ElasticAdministratorHttpHelper;
import common.dao.elastic.ElasticSearchDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import textprocessing.nlpanalysis.entity_linking.TextEntityLinker;
import textprocessing.nlpanalysis.context_annotate.ParagraphSplitor;

/**
 * Extracts English Wikinews articles from Elasticsearch via the scroll API,
 * splits each article into sentence-level contexts, links entities found in
 * each context against the knowledge graph, and indexes the resulting context
 * documents back into Elasticsearch.
 *
 * Created by common on 2017/3/30.
 */
public class ExtractorEnWikiNews {

    static final Logger logger = LoggerFactory.getLogger(ExtractorEnWikiNews.class);

    /** Default Elasticsearch host used by the no-arg constructor. */
    private static final String DEFAULT_HOST = "192.168.1.108";
    /** Elasticsearch cluster name for the transport client. */
    private static final String CLUSTER_NAME = "elastic_cluster";
    /** Transport-client port used by {@link ElasticSearchDao}. */
    private static final int TRANSPORT_PORT = 29300;
    /** HTTP port used by the scroll helper calls. */
    private static final int HTTP_PORT = 29200;
    /** Index holding both the source pages and the generated contexts. */
    private static final String INDEX = "enwikinews";
    /** Source document type read by the scroll. */
    private static final String PAGE_TYPE = "page";
    /** Target document type written by {@link #processContext(ObjectNode)}. */
    private static final String CONTEXT_TYPE = "context";
    /** Number of hits fetched per scroll batch. */
    private static final int SCROLL_BATCH_SIZE = 100;
    // Scroll keep-alive passed to initial_scroll; presumably minutes — TODO confirm
    // against ElasticAdministratorHttpHelper.
    private static final int SCROLL_KEEPALIVE = 10;
    /** Pause between scroll batches, to throttle load on the cluster. */
    private static final long BATCH_SLEEP_MILLIS = 2000L;

    private final ElasticSearchDao esDao;
    private final ObjectMapper mapper;
    private final String host;

    /** Builds an extractor against the default host ({@value #DEFAULT_HOST}). */
    public ExtractorEnWikiNews() {
        this(DEFAULT_HOST);
    }

    /**
     * Builds an extractor against the given Elasticsearch host.
     *
     * @param host hostname or IP of the Elasticsearch node
     */
    public ExtractorEnWikiNews(String host) {
        this.host = host;
        this.esDao = new ElasticSearchDao(CLUSTER_NAME, host, TRANSPORT_PORT);
        this.mapper = new ObjectMapper();
    }

    /**
     * Extracts the scroll id from a scroll response.
     *
     * @param scroll_output raw JSON scroll response
     * @return the scroll id to pass to the next continue-scroll call
     */
    public String getScrollId(JsonNode scroll_output) {
        return ElasticAdministratorHttpHelper.parse_scroll_id(scroll_output);
    }

    /**
     * Extracts the number of hits in the current scroll batch.
     *
     * @param scroll_output raw JSON scroll response
     * @return size of the current batch; 0 signals the scroll is exhausted
     */
    public Integer getScrollSize(JsonNode scroll_output) {
        return ElasticAdministratorHttpHelper.parse_cursor_size(scroll_output);
    }

    /**
     * Opens a new scroll over the {@value #INDEX}/{@value #PAGE_TYPE} documents.
     *
     * @return the raw JSON response of the initial scroll request
     */
    public JsonNode getEnwikiInitialScroll() {
        return ElasticAdministratorHttpHelper
                .initial_scroll(this.host, HTTP_PORT, INDEX, PAGE_TYPE,
                        SCROLL_BATCH_SIZE, SCROLL_KEEPALIVE);
    }

    /**
     * Fetches the next batch of an already-open scroll.
     *
     * @param scroll_id scroll id returned by the previous scroll response
     * @return the raw JSON response of the continue-scroll request
     */
    public JsonNode getEnwikiContinueScroll(String scroll_id) {
        return ElasticAdministratorHttpHelper
                .continue_scroll(this.host, HTTP_PORT, SCROLL_BATCH_SIZE, scroll_id);
    }

    /**
     * Extracts the array of hit documents from a scroll response.
     *
     * @param scroll_output raw JSON scroll response
     * @return the hits as a Jackson array node
     */
    public ArrayNode getDocumentsArray(JsonNode scroll_output) {
        return ElasticAdministratorHttpHelper.parse_records_array(scroll_output);
    }

    /*********************************************************************
     *
     */

    /**
     * Scrolls through every page document in the index and processes each one.
     * Keeps fetching batches until a batch comes back empty; sleeps between
     * batches to throttle cluster load. A thread interrupt stops the traversal
     * cleanly; any other per-batch failure is logged and the batch is retried
     * with the same scroll id.
     */
    public void traversalAllRecords() {
        JsonNode initialOutput = ElasticAdministratorHttpHelper
                .initial_scroll(this.host, HTTP_PORT, INDEX, PAGE_TYPE,
                        SCROLL_BATCH_SIZE, SCROLL_KEEPALIVE);

        Integer totalSize = ElasticAdministratorHttpHelper.parse_scroll_size(initialOutput);
        int cursorSize = ElasticAdministratorHttpHelper.parse_cursor_size(initialOutput);
        int accumSize = cursorSize;

        traversalScrollRecords(ElasticAdministratorHttpHelper.parse_records_array(initialOutput));

        String scrollId = ElasticAdministratorHttpHelper.parse_scroll_id(initialOutput);
        logger.info("[traversalAllRecords] scroll_id = {}", scrollId);

        do {
            try {
                JsonNode scrollOutput = ElasticAdministratorHttpHelper
                        .continue_scroll(this.host, HTTP_PORT, SCROLL_BATCH_SIZE, scrollId);

                cursorSize = ElasticAdministratorHttpHelper.parse_cursor_size(scrollOutput);
                accumSize += cursorSize;

                traversalScrollRecords(ElasticAdministratorHttpHelper.parse_records_array(scrollOutput));

                scrollId = ElasticAdministratorHttpHelper.parse_scroll_id(scrollOutput);
                logger.info("[traversalAllRecords] scroll_id = {}", scrollId);

                Thread.sleep(BATCH_SLEEP_MILLIS);

            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop traversing instead of
                // swallowing the signal (the original looped forever on it).
                Thread.currentThread().interrupt();
                logger.warn("[traversalAllRecords] interrupted, stopping traversal");
                break;
            } catch (Exception e) {
                // Log with the cause instead of printStackTrace(); the loop
                // retries the same scroll_id on the next iteration.
                logger.error("[traversalAllRecords] scroll batch failed, will retry", e);
            }
        } while (cursorSize > 0);

        logger.info("[traversalAllRecords] total_size = {}, accum_size = {}", totalSize, accumSize);
    }

    /**
     * Processes every record of one scroll batch.
     *
     * @param records hits array from a scroll response
     */
    public void traversalScrollRecords(ArrayNode records) {
        for (JsonNode record : records) {
            processRecord(record);
        }
    }

    /**
     * Splits one page document into sentence contexts and indexes each one.
     *
     * @param record a single scroll hit containing {@code _id} and
     *               {@code _source.text}
     */
    public void processRecord(JsonNode record) {
        String id = record.get("_id").asText();
        logger.info("[processRecord] id = {}", id); // tag fixed: was [traversalAllRecords]
        String text = record.get("_source").get("text").asText();

        // Removed an unused ArrayNode that was created but never populated.
        for (String context : ParagraphSplitor.articleToSentences(text)) {
            processContext(generateContext(context, id));
        }
    }

    /**
     * Builds a context document: the sentence text, the id of the page it came
     * from, and the ids of the entities linked in it.
     *
     * @param context one sentence/paragraph of the article
     * @param docid   id of the source page document
     * @return JSON object with fields {@code text}, {@code docid}, {@code entities}
     */
    public ObjectNode generateContext(String context, String docid) {
        ObjectNode contextNode = this.mapper.createObjectNode();
        contextNode.put("text", context);
        contextNode.put("docid", docid);

        ArrayNode entitiesArrayNode = this.mapper.createArrayNode();
        for (String entityId : TextEntityLinker.linkEntityFromKG(context)) {
            entitiesArrayNode.add(entityId);
        }

        // set(String, JsonNode) replaces the deprecated put(String, JsonNode).
        contextNode.set("entities", entitiesArrayNode);

        return contextNode;
    }

    /**
     * Indexes one context document.
     *
     * @param contextNode document produced by {@link #generateContext}
     */
    public void processContext(ObjectNode contextNode) {
        // null id: presumably lets Elasticsearch auto-generate the document
        // id — TODO confirm against ElasticSearchDao.index.
        this.esDao.index(INDEX, CONTEXT_TYPE, null, (JsonNode) contextNode);
    }

    /*************************************************
     *
     */

    /** Entry point: traverses the whole index with the default host. */
    public static void main(String[] args) {
        ExtractorEnWikiNews convertor = new ExtractorEnWikiNews();
        convertor.traversalAllRecords();
    }

}
