package com.biconn.bigdata.question.and.answer.listenner;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.biconn.bigdata.common.constant.Constants;
import com.biconn.bigdata.common.md5.MD5Util;
import com.biconn.bigdata.es.model.QuestionAndAnswer;
import com.biconn.bigdata.es.repository.BaseElasticsearchRepository;



@Component
public class QuestionAndAnswerListenner {
	private static final Logger logger = LoggerFactory.getLogger(QuestionAndAnswerListenner.class);

	@Autowired
	private MongoTemplate mongoTemplate;
	@Autowired
	private BaseElasticsearchRepository<QuestionAndAnswer> baseElasticsearchRepository;

	/** Mongo field name holding the MD5 of the question text; backs the unique/dedup index. */
	private static final String MD5_KEY = "askContentMd5";

	/**
	 * Consumes question-and-answer messages produced upstream by JStorm and
	 * double-writes them: stage 1 into MongoDB (deduplicated via an MD5 of the
	 * "ask_content" field, which is a unique index), stage 2 in bulk into
	 * Elasticsearch.
	 *
	 * <p>NOTE(review): the {@code @KafkaListener} annotation, the Mongo insert and
	 * the manual offset commit are all currently commented out — this consumer is
	 * effectively disabled; the commented lines are kept as the re-enable toggle.
	 *
	 * @param records batch of Kafka records; each value is expected to be a JSON
	 *                string with "question", "answers" keys — TODO confirm schema
	 * @param ack     manual-ack handle (acknowledge() currently disabled)
	 */
	//@KafkaListener(topics = {"question_and_answer_result_topic_to_kafka"},containerFactory="batchFactory",group="big-data-support-consumer")
	public void processMessage(List<ConsumerRecord<?, ?>> records, Acknowledgment ack) {
		int mongoDataCount = 0;
		List<QuestionAndAnswer> questionAndAnswerList = new ArrayList<>();

		for (ConsumerRecord<?, ?> item : records) {
			Optional<?> kafkaMessage = Optional.ofNullable(item.value());
			if (!kafkaMessage.isPresent()) {
				continue;
			}
			String md5AskContent = null;
			try {
				// The JStorm payload is a standard JSON document.
				String message = (String) kafkaMessage.get();
				JSONObject messageJson = JSONObject.parseObject(message);
				JSONObject questionJsonResult = messageJson.getJSONObject("question");

				// Stage 1: MongoDB. askContentMd5 is a unique index, so a failed
				// insert marks a duplicate question and the record is skipped.
				md5AskContent = MD5Util.encode(questionJsonResult.getString("ask_content"));
				questionJsonResult.put("answers", messageJson.getJSONArray("answers"));
				questionJsonResult.put(MD5_KEY, md5AskContent);
				//mongoTemplate.insert(questionJsonResult, "question_and_answer_index_stop");
				mongoDataCount++;

				// Stage 2: collect for the bulk Elasticsearch write below.
				QuestionAndAnswer questionAndAnswer = JSON.parseObject(
						questionJsonResult.toJSONString(), new TypeReference<QuestionAndAnswer>() {});
				questionAndAnswerList.add(questionAndAnswer);
			} catch (Exception e) {
				// Duplicate-key inserts land here by design; previously ANY failure
				// (parse errors included) was swallowed and mislabeled as dedup.
				// Keep the record-level skip but log the actual cause.
				logger.warn("skip record (duplicate or processing failure), askContentMd5={}", md5AskContent, e);
			}
		}

		int esStatusCode = 0;
		try {
			// Stage 2: bulk-persist the batch into Elasticsearch.
			if (!questionAndAnswerList.isEmpty()) {
				esStatusCode = baseElasticsearchRepository.batch(questionAndAnswerList,
						"question_and_answer_realtime_index", "questionAndAnswerRealtime");
			}
			logger.info("Mongo persisted count: {}", mongoDataCount);
			logger.info("ES bulk save status: {}, persisted count: {}", esStatusCode, questionAndAnswerList.size());
		} catch (Exception e) {
			// Batch-level failure: offsets are not committed anyway (ack disabled).
			logger.error("Topic question_and_answer_result_topic_to_kafka deal message fail!", e);
		} finally {
			//ack.acknowledge();// manual offset commit (currently disabled)
			questionAndAnswerList.clear();
		}
	}

}
