package com.biconn.bigdata.question.and.answer.listenner;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import org.apache.commons.io.FileUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.biconn.bigdata.common.md5.MD5Util;
import com.biconn.bigdata.common.util.DateUtil;
import com.biconn.bigdata.es.model.QuestionAndAnswerOnline;
import com.biconn.bigdata.es.repository.BaseElasticsearchRepository;



@Component
@Component
public class QuestionAndAnswerOntimeListenner {

	private static final Logger logger = LoggerFactory.getLogger(QuestionAndAnswerOntimeListenner.class);

	@Autowired
	private MongoTemplate mongoTemplate;
	@Autowired
	private BaseElasticsearchRepository<QuestionAndAnswerOnline> baseElasticsearchRepository;
	@Value("${question.and.answer.repeatdata.path}")
	private String repeatdataPath;

	// Mongo document key carrying the MD5 of the question content; backed by a
	// unique index so duplicate questions fail on insert (de-duplication).
	private static final String MD5_KEY = "ask_content_md5";
	// Target Mongo collection name, also used as the ES index name.
	private static final String DATA_STORE_INDEX_NAME = "question_and_answer_realtime_index";
	// ES document type name.
	private static final String ES_DATA_TYPE_NAME = "questionAndAnswerRealtime";

	/**
	 * Consumes question-and-answer messages emitted by the JStorm bolt and
	 * persists each record twice ("double write"):
	 * <ol>
	 *   <li>MongoDB — one insert per record; the MD5 of the question content is
	 *       stored under {@code ask_content_md5}, a unique index, so a duplicate
	 *       question makes the insert throw and the record is skipped.</li>
	 *   <li>Elasticsearch — all surviving records of the batch in one bulk call.</li>
	 * </ol>
	 * The Kafka offset is acknowledged manually in {@code finally}, so the batch
	 * is committed even when the ES bulk write fails (records are then lost from
	 * ES — NOTE(review): confirm this at-most-once behaviour is intended).
	 *
	 * @param records batch of raw JSON messages from the bolt topic
	 * @param ack     manual acknowledgment handle for offset commit
	 */
	@KafkaListener(topics = {"question.and.answer.jstorm.bolt.dealed.topic"}, containerFactory = "batchFactory", group = "question_and_answer")
	public void processMessage(List<ConsumerRecord<?, ?>> records, Acknowledgment ack) {
		int mongoDataCount = 0;
		List<QuestionAndAnswerOnline> questionAndAnswerOntimeList = new ArrayList<>();
		for (ConsumerRecord<?, ?> item : records) {
			Optional<?> kafkaMessage = Optional.ofNullable(item.value());
			if (!kafkaMessage.isPresent()) {
				continue;
			}
			String md5AskContent = null;
			try {
				// The bolt sends a plain JSON string.
				String message = (String) kafkaMessage.get();
				JSONObject messageJson = JSONObject.parseObject(message);
				// Skip records that carry no doctor reply.
				if (messageJson.getJSONArray("answers").size() <= 0) {
					continue;
				}
				// Stage 1: MongoDB. ask_content_md5 is a unique index — inserting a
				// duplicate question content throws, which filters repeated data.
				md5AskContent = MD5Util.encode(messageJson.getString("question_content"));
				messageJson.put(MD5_KEY, md5AskContent);
				// Round-trip through the domain object purely to rename fields.
				QuestionAndAnswerOnline questionAndAnswerOntime =
						JSON.parseObject(messageJson.toJSONString(), new TypeReference<QuestionAndAnswerOnline>() {});
				// Persist as JSONObject; normalise answerContent into a JSON array.
				JSONObject renameJsonResult = JSONObject.parseObject(questionAndAnswerOntime.toString());
				renameJsonResult.put("answerContent", renameJsonResult.getJSONArray("answerContent"));
				renameJsonResult.remove("id"); // the conversion adds an "id" field; drop it
				mongoTemplate.insert(renameJsonResult, DATA_STORE_INDEX_NAME);
				mongoDataCount++;
				// Stage 2 (collect): queue for the ES bulk write below.
				questionAndAnswerOntimeList.add(questionAndAnswerOntime);
			} catch (Exception e) {
				// A duplicate-key error from Mongo means the record was already seen;
				// any other failure (bad JSON, Mongo outage) lands here too, so keep
				// the cause in the log instead of swallowing it.
				logger.warn("数据去重/记录跳过, ask_content_md5={}", md5AskContent, e);
			}
		}
		int esStatusCode = 0;
		try {
			// Stage 2 (write): bulk-persist the batch into ES.
			if (!questionAndAnswerOntimeList.isEmpty()) {
				esStatusCode = baseElasticsearchRepository.batch(questionAndAnswerOntimeList, DATA_STORE_INDEX_NAME, ES_DATA_TYPE_NAME);
			}
			logger.info("Mongo持久化数据量：{}", mongoDataCount);
			logger.info("ES批量保存状态码：{},持久化数据量：{}", esStatusCode, questionAndAnswerOntimeList.size());
		} catch (Exception e) {
			logger.error("Topic question.and.answer.jstorm.bolt.dealed.topic deal message fail!", e);
		} finally {
			ack.acknowledge(); // manual offset commit — happens even on ES failure
			questionAndAnswerOntimeList.clear();
		}
	}

}
