package com.biconn.bigdata.jstrom.question.and.answer.bolt;

import java.io.File;
import java.nio.charset.Charset;
import java.util.Date;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.biconn.bigdata.common.util.DateUtil;
import com.biconn.bigdata.common.util.DateUtils;
import com.biconn.bigdata.common.util.StringTools;
import com.biconn.bigdata.jstrom.util.Constants;
import com.biconn.bigdata.jstrom.util.PropertiesUtil;
import com.biconn.bigdata.jstrom.util.RegularExpress;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Tuple;

public class QuestionAndAnswerOntimeBolt implements IRichBolt{
	// SLF4J Logger is not Serializable; keep it static so Storm's serialization
	// of the bolt instance never tries to serialize it.
	private static final Logger logger = LoggerFactory.getLogger(QuestionAndAnswerOntimeBolt.class);
	// Bolts are serialized when the topology is submitted, so a stable
	// serialVersionUID is required.
	private static final long serialVersionUID = 8740926838799779884L;
	// Regex character class of special/punctuation characters stripped from
	// question and answer text (both ASCII and full-width CJK punctuation).
	private static final String specialCharater = "[`~@#$%^&*()+=|{}':'\\\\\\\\[\\\\\\\\].<>/~@#￥%……&*（）——+|{}【】‘”“’r n t]";
	// Regex character class of English letters and digits, also stripped from
	// free-text fields before forwarding.
	private static final String englishCharaterAndNumber = "[A-Za-z0-9]";
	private OutputCollector collector;
	private KafkaProducer<String, String> kafkaProducer;
	// Loaded once per worker at deserialization time.
	// NOTE(review): this field is never read in this class — confirm whether it
	// is still needed before removing.
	Map<String,String> map = RegularExpress.loadXml();

	public QuestionAndAnswerOntimeBolt() {
		logger.info("QuestionAndAnswerBolt:**********************************");
	}

	/**
	 * Stores the output collector and initializes the Kafka producer used to
	 * forward cleaned messages.
	 *
	 * @param stormConf topology configuration (unused)
	 * @param context   task context (unused)
	 * @param collector collector used to ack/fail incoming tuples
	 */
	@Override
	public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
		// BUG FIX: the collector was never stored, so tuples could not be
		// acked/failed and would time out and be replayed under acking.
		this.collector = collector;
		this.kafkaProducer = kafkaProducerInit();
	}

	/**
	 * Cleans one crawled Q&A JSON message and forwards it to the downstream
	 * Kafka topic.
	 *
	 * <p>Processing: strips special characters, English letters, digits and
	 * whitespace from the question, title and each answer's text fields,
	 * normalizes the question timestamp, then sends the rebuilt JSON to Kafka
	 * and waits briefly for the broker acknowledgment. Messages with no
	 * answers are dropped.
	 *
	 * @param input tuple carrying the raw crawled JSON string
	 * @see backtype.storm.task.IBolt#execute(backtype.storm.tuple.Tuple)
	 */
	@Override
	public void execute(Tuple input) {
		try {
			// Raw crawled payload from the configured tuple field.
			String genericrecord = (String)input.getValueByField(Constants.SCRAPY_QUESTION_AND_ANSWER_TOPIC_JSTORM_REALTIME_FIELD);
			// NOTE(review): crawler emits Python-style "False" tokens that break
			// JSON parsing; they are stripped wholesale here — confirm this can
			// never clip legitimate text containing the word "False".
			genericrecord = genericrecord.replaceAll("False", "");
			JSONObject jsonMsg = JSONObject.parseObject(genericrecord);
			// Strip special characters from the question text.
			String question_content = StringUtils.replaceAll(StringTools.mkobjectToString(jsonMsg.get("question_content")), specialCharater, "");
			// Cleaned answers accumulated here.
			JSONArray answerArray = new JSONArray();
			// Answers arrive as a JSON-array string nested in the message.
			String answers = StringTools.mkobjectToString(jsonMsg.get("answers"));
			if(!StringUtils.isEmpty(answers)) {
				JSONArray jsonArray = JSON.parseArray(answers);
				// Messages without any answer are dropped entirely.
				if(jsonArray.size() > 0) {
					for(int n=0;n<jsonArray.size();n++) {
						JSONObject item = jsonArray.getJSONObject(n);
						String answerContent = item.getString("answer_content");
						if(!StringUtils.isEmpty(answerContent)) {
							// Remove special characters, whitespace, then ASCII letters/digits.
							answerContent = StringUtils.replaceAll(answerContent, specialCharater, "").replaceAll("\\s*", "");
							answerContent = StringUtils.replaceAll(answerContent, englishCharaterAndNumber, "");
							item.put("answer_content", answerContent);
							// Image URL: only strip whitespace (letters/digits are significant).
							// NOTE(review): getString("doctor_img") may be null and then NPEs
							// into the outer catch — confirm the crawler always sets it.
							item.put("doctor_img", item.getString("doctor_img").replaceAll("\\s*", ""));
							item.put("doctor_dep", StringUtils.replaceAll(item.getString("doctor_dep"), englishCharaterAndNumber, "").replaceAll("\\s*", ""));
							item.put("doctor_good", StringUtils.replaceAll(item.getString("doctor_good"), englishCharaterAndNumber, "").replaceAll("\\s*", ""));
							item.put("doctor_hos", StringUtils.replaceAll(item.getString("doctor_hos"), englishCharaterAndNumber, "").replaceAll("\\s*", ""));
							item.put("doctor_name", StringUtils.replaceAll(item.getString("doctor_name"), englishCharaterAndNumber, "").replaceAll("\\s*", ""));
							item.put("doctor_pos", StringUtils.replaceAll(item.getString("doctor_pos"), englishCharaterAndNumber, "").replaceAll("\\s*", ""));
							answerArray.add(item);
						}
					}
					// Normalize the timestamp, preferring the crawled time.
					questionTime(jsonMsg);
					jsonMsg.put("answers", answerArray.toString());
					jsonMsg.put("question_content", StringUtils.replaceAll(question_content, englishCharaterAndNumber, "").replaceAll("\\s*", ""));
					jsonMsg.put("title", StringUtils.replaceAll(jsonMsg.getString("title"), englishCharaterAndNumber, "").replaceAll("\\s*", ""));
					jsonMsg.put("department", StringUtils.replaceAll(jsonMsg.getString("department"), englishCharaterAndNumber, "").replaceAll("\\s*", ""));
					jsonMsg.put("source_web", StringUtils.replaceAll(jsonMsg.getString("source_web"), englishCharaterAndNumber, "").replaceAll("\\s*", ""));
					Future<RecordMetadata> future = kafkaProducer.send(new ProducerRecord<String, String>(Constants.QUESTION_AND_ANSWER_JSTORM_BOLT_DEALED_TOPIC, Constants.QUESTION_AND_ANSWER_JSTORM_BOLT_DEALED_TOPIC_KEY, jsonMsg.toJSONString()));
					// BUG FIX: timeout unit was TimeUnit.SECONDS, blocking the bolt
					// for up to ~33 minutes per tuple; 2000 ms was clearly intended.
					RecordMetadata recordMetadata = future.get(2000, TimeUnit.MILLISECONDS);
					recordMetadata.offset();
				}
			}
			// BUG FIX: ack successfully processed (or deliberately skipped) tuples
			// so they are not replayed by the acking framework.
			collector.ack(input);
		} catch (Exception e) {
			// BUG FIX: the exception was swallowed with a bare info message,
			// hiding the cause; log it and fail the tuple for redelivery.
			logger.error("ontime json处理过程异常", e);
			collector.fail(input);
		}
	}

	/**
	 * Called when the topology is shut down; releases the Kafka producer.
	 */
	@Override
	public void cleanup() {
		// BUG FIX: the producer was never closed, leaking sockets/buffers and
		// possibly dropping unflushed records on shutdown.
		if (kafkaProducer != null) {
			kafkaProducer.close();
		}
	}

	/**
	 * This bolt emits nothing downstream via Storm (output goes to Kafka), so
	 * no fields are declared.
	 */
	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
	}

	@Override
	public Map<String, Object> getComponentConfiguration() {
		// No component-specific configuration.
		return null;
	}

	/**
	 * Builds the Kafka producer used to publish cleaned messages.
	 *
	 * @return a configured String/String {@link KafkaProducer}
	 */
	private KafkaProducer<String,String> kafkaProducerInit(){
		Properties props = new Properties();
		props.put("bootstrap.servers", PropertiesUtil.getValue("kafka.consumer.bootstrap.servers"));
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		KafkaProducer<String,String> kafkaProducer = new KafkaProducer<String, String>(props);
		// BUG FIX: was System.out.println; route through the logger instead.
		logger.info("消息队列 生产者开始初始化...");
		return kafkaProducer;
	}

	/**
	 * Normalizes the {@code question_time} field in place, preferring the
	 * crawled timestamp and falling back to the current time.
	 *
	 * <p>Accepted crawled shapes (by string length):
	 * 10 — {@code yyyy-MM-dd}, kept as-is;
	 * 18 — {@code yyyy-MM-ddHH:mm:ss} (missing space), reformatted to
	 * {@code yyyy-MM-dd HH:mm:ss};
	 * 19 — {@code yyyy-MM-dd HH:mm:ss}, kept as-is.
	 * Anything else (including parse failures) is replaced with "now".
	 *
	 * @param jsonMsg message whose {@code question_time} field is rewritten
	 */
	private void questionTime(JSONObject jsonMsg) {
		try {
			String questionTime = jsonMsg.getString("question_time");
			if(!com.biconn.bigdata.jstrom.util.StringUtils.isEmpty(questionTime)) {
				if(questionTime.length()==10) {
					// yyyy-MM-dd, e.g. 2018-09-12 — already acceptable.
				}else if(questionTime.length()==18) {
					// yyyy-MM-ddHH:mm:ss, e.g. 2018-09-1208:34:21 — insert the space.
					Date date = DateUtils.dateTime(DateUtils.YYYY_MM_DDHH_MM_SS, questionTime);
					jsonMsg.put("question_time", DateUtils.parseDateToStr(DateUtils.YYYY_MM_DD_HH_MM_SS, date));
				}else if(questionTime.length()==19) {
					// yyyy-MM-dd HH:mm:ss, e.g. 2018-09-12 08:34:21 — already acceptable.
				}else {
					// Unrecognized shape: fall back to the current time.
					jsonMsg.put("question_time", DateUtils.parseDateToStr(DateUtils.YYYY_MM_DD_HH_MM_SS, new Date()));
				}
			}else {
				// Missing timestamp: use the current time.
				jsonMsg.put("question_time", DateUtils.parseDateToStr(DateUtils.YYYY_MM_DD_HH_MM_SS, new Date()));
			}
		} catch (Exception e) {
			// Parse failure: use the current time rather than dropping the message.
			jsonMsg.put("question_time", DateUtils.parseDateToStr(DateUtils.YYYY_MM_DD_HH_MM_SS, new Date()));
		}
	}
}
