package job;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import model.Organization;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jackson.JsonNode;
import job.Producer.Topic;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.StringDecoder;
import play.db.jpa.JPA;
import play.jobs.Every;
import play.jobs.Job;
import utils.JsonUtil;
import utils.LogUtil;
import utils.TopicUtil;

/**
 * Consumes bus-station profile messages from the website topic and
 * synchronizes station names into the organization table.
 *
 * @author zhangweigang
 * @date 2018-08-04 08:56:01
 */
//@SuppressWarnings("rawtypes")
//@Every(value = "5s")
public class WebbusTopicConsumer extends Job{
	
	private final ConsumerConnector consumer;
	private final String topic;

	public WebbusTopicConsumer() {
		consumer = kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig());// 创建到zookeeper的连接
		this.topic = TopicUtil.getTopic(Topic.busTopic);
	}

	private ConsumerConfig createConsumerConfig() {
		Properties props = new Properties();
		props.put("zookeeper.connect", KafkaProperties.zkConnect);// 要连接的zookeeper地址
		props.put("group.id", KafkaProperties.groupId);// 消费者所在的组
//		props.put("consumer.id", KafkaProperties.consumerId);// 此消费者的唯一标识用于在zookeeper中存储此消费者的偏移量 
		//props.put("consumer.id", "LockConsumer");// 此消费者的唯一标识用于在zookeeper中存储此消费者的偏移量 
		props.put("zookeeper.session.timeout.ms", KafkaProperties.zSessionTimeout);// zookeeper连接超时时间  
		// props.put("zookeeper.sync.time.ms", "2000");
		props.put("auto.offset.reset", "smallest");
		props.put("auto.commit.enable", "false");
		return new ConsumerConfig(props);
	}

	@Override
	public void doJob() throws Exception {
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		topicCountMap.put(topic, new Integer(1));// topic:要取的主题，Integer:创建的数据流个数
		Map<String, List<KafkaStream<String, String>>> consumerMap = consumer.createMessageStreams(topicCountMap, new StringDecoder(null), new StringDecoder(null));// 获取创建的流
		List<KafkaStream<String, String>> list = consumerMap.get(topic);// 取得某一个消息的流
		KafkaStream<String, String> kafkaStream = list.get(0);// 刚才创建流是传入的流个数是1，所以通过get(0)获取
		ConsumerIterator<String, String> it = kafkaStream.iterator();// 遍历消息，此函数会一直阻塞
		MessageAndMetadata<String, String> data = null;
		while (it.hasNext()) {
			data = it.next();
			// 此处做对消息的处理
			String message = data.message();
			dealMessage(message);
			consumer.commitOffsets();// 提交此消费者的偏移量
			JPA.closeTx(true);
		}
		super.doJob();
	}
	
	private void dealMessage(String message){
		String currrntDB = JPA.getCurrentConfigName();
		try {
			JsonNode obnode = JsonUtil.getJsonNode(message);
			JsonNode entitynode = obnode.get("entitydata");
			String tablename = obnode.get("tablename").getTextValue();
			
			if("businfo".equals(tablename) && null != entitynode.get("buscode") && null != entitynode.get("busname")){
				String buscode = entitynode.get("buscode").asText();
				String busname = entitynode.get("busname").asText();
				if(StringUtils.isEmpty(buscode) || StringUtils.isEmpty(busname)){
					return;
				}
				JPA.setCurrentConfigName("play");
				Organization org = Organization.findBy_code(buscode, true);
				if(null == org){
					return;
				}
				
				String sql = "update organization set name=:busname where code=:buscode ";
				JPA.setCurrentConfigName(null);
				if(!JPA.em().getTransaction().isActive()){
					JPA.em().getTransaction().begin();
				}
				JPA.em().createNativeQuery(sql).setParameter("busname", busname).setParameter("buscode", buscode).executeUpdate();
				if(JPA.em().getTransaction().isActive()){
					JPA.em().getTransaction().commit();
				}
			}
		} catch (Exception e) {
			LogUtil.logerror("dealMessage()消费客运站信息失败"+message, e);
			if(JPA.em().getTransaction().isActive()){
				JPA.em().getTransaction().rollback();
			}
		}finally {
			JPA.setCurrentConfigName(currrntDB);
		}
	}
}
