/**
 * @Title:  WordItemToKafkaBlot.java
 * @Package com.biconn.bigdata.jstrom.question.and.answer.bolt
 * @Description: Storm bolt that converts question-and-answer Avro records into
 *               a JSON payload and publishes it to a Kafka topic.
 * @author: 123774135@qq.com
 * @date:   2018-08-15 09:44:55
 * @version V1.0
 * @Copyright: 2018 www.tydic.com Inc. All rights reserved.
 */
package com.biconn.bigdata.jstrom.question.and.answer.bolt;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.apache.avro.generic.GenericRecord;
import org.apache.commons.io.IOUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.biconn.bigdata.common.md5.MD5Util;
import com.biconn.bigdata.common.util.AvroUtil;
import com.biconn.bigdata.common.util.StringResourceLoad;
import com.biconn.bigdata.common.util.StringTools;
import com.biconn.bigdata.jstrom.util.Constants;
import com.biconn.bigdata.jstrom.util.PropertiesUtil;
import com.google.common.collect.Sets;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Tuple;

/**
 * @ClassName:  WordItemToKafkaBlot
 * @Description: Terminal (sink) bolt: reads an Avro {@code GenericRecord} from the
 *               incoming tuple, splits its "answers" field into a JSON array,
 *               re-projects the record onto the "PatientOfQuestionProcess" schema,
 *               and sends the combined {question, answers} JSON to Kafka.
 * @author: 123774135@qq.com
 * @date:   2018-08-15 09:44:55
 *
 * @Copyright: 2018
 */
public class WordItemToKafkaBlot implements IRichBolt {

	private static final long serialVersionUID = 6825890679561228732L;

	// static: SLF4J Logger is not Serializable, and Storm serializes bolt instances,
	// so keeping it as an instance field risks a NotSerializableException at submit time.
	private static final Logger LOGGER = LoggerFactory.getLogger(WordItemToKafkaBlot.class);

	// Raw Avro schema text, loaded in prepare() from the "avro_path" topology config.
	private String avro_str;
	// Helper used to copy records between the raw and the "process" schemas.
	private AvroUtil avroUtil;

	private KafkaProducer<String, String> kafkaProducer;

	public WordItemToKafkaBlot() {
	}

	/**
	 * Initializes the Kafka producer and loads the Avro schema definition from the
	 * file configured under {@code avro_path}.
	 *
	 * @throws RuntimeException if the schema file cannot be read — the bolt cannot
	 *                          process any tuple without it, so fail fast instead of
	 *                          silently continuing with a null {@code avroUtil}.
	 */
	@Override
	public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
		this.kafkaProducer = kafkaProducerInit();
		try {
			// Read with an explicit charset; IOUtils.toString(URI) would use the
			// platform default, which varies between worker machines.
			this.avro_str = IOUtils.toString(
					new File(stormConf.get("avro_path").toString()).toURI(), StandardCharsets.UTF_8);
			this.avroUtil = new AvroUtil(new StringResourceLoad(avro_str));
		} catch (IOException e) {
			// Previously this was swallowed with printStackTrace(), leaving avroUtil
			// null and every execute() failing with an NPE. Fail fast instead.
			throw new RuntimeException(
					"Failed to load avro schema from " + stormConf.get("avro_path"), e);
		}
	}

	/**
	 * Transforms one tuple and forwards it to Kafka.
	 *
	 * The "answers" field of the incoming record is a "#$"-separated list of JSON
	 * objects; it is split into a JSON array. The record itself is re-projected from
	 * the "PatientOfQuestion" schema onto "PatientOfQuestionProcess" (which, per the
	 * removed commented-out code, presumably drops id/answers — TODO confirm against
	 * the schema file). Failures are logged and the tuple is dropped.
	 */
	@Override
	public void execute(Tuple input) {
		try {
			GenericRecord genericrecord =
					(GenericRecord) input.getValueByField(Constants.JSTORM_QUESTION_AND_ANSWER_COLLECTION);
			String anString = StringTools.mkobjectToString(genericrecord.get("answers"));
			JSONArray jsonArray = new JSONArray();
			if (!StringTools.isBlank(anString)) {
				for (String str : StringTools.mkSplit(anString, "#$")) {
					// Each entry is a JSON object (answer_time, answer_content,
					// answer_id, answer_doctor).
					jsonArray.add(JSONObject.parseObject(str));
				}
			}
			// Re-project the record onto the "process" schema.
			GenericRecord genericrecord_1 = avroUtil.copy(
					avroUtil.getSchemasBytypename("PatientOfQuestion"),
					avroUtil.getSchemasBytypename("PatientOfQuestionProcess"),
					genericrecord);
			JSONObject jsonObject = new JSONObject();
			jsonObject.put(Constants.TAG_QUESTION, genericrecord_1.toString());
			jsonObject.put(Constants.TAG_ANSWERS, jsonArray);
			Future<RecordMetadata> future = kafkaProducer.send(new ProducerRecord<String, String>(
					Constants.TAG_TOPIC_NAME, Constants.TAG_BUTO_KEY, jsonObject.toJSONString()));
			// Bug fix: the original waited 2000 SECONDS (~33 minutes), which would
			// stall the bolt's worker thread; 2000 ms was almost certainly intended.
			RecordMetadata recordMetadata = future.get(2000, TimeUnit.MILLISECONDS);
			LOGGER.debug("sent record to kafka, topic={} offset={}",
					Constants.TAG_TOPIC_NAME, recordMetadata.offset());
		} catch (Exception e) {
			// Log with full stack trace instead of printStackTrace(); the tuple is
			// dropped (no ack/fail handling existed in the original either).
			LOGGER.error("Failed to forward tuple to kafka", e);
		}
	}

	/** Releases the Kafka producer when the bolt is shut down (was a leak before). */
	@Override
	public void cleanup() {
		if (kafkaProducer != null) {
			kafkaProducer.close();
		}
	}

	/** This bolt is a sink: it emits no downstream tuples, so nothing is declared. */
	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
	}

	@Override
	public Map<String, Object> getComponentConfiguration() {
		// No per-component configuration overrides.
		return null;
	}

	/**
	 * Builds a String/String Kafka producer from the bootstrap servers configured
	 * under {@code kafka.consumer.bootstrap.servers}.
	 *
	 * @return a ready-to-use producer; the caller owns it and must close it.
	 */
	private KafkaProducer<String, String> kafkaProducerInit() {
		Properties props = new Properties();
		props.put("bootstrap.servers", PropertiesUtil.getValue("kafka.consumer.bootstrap.servers"));
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		LOGGER.info("消息队列 生产者开始初始化...");
		return new KafkaProducer<String, String>(props);
	}
}
