package com.amigo.online.provider.kafka.receiver;

import java.util.Optional;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.stereotype.Component;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.amigo.online.provider.sync.abs.DataHandlerBroker;
import com.amigo.online.provider.sync.util.DataHandlerBrokerUtil;


/**
 * Receiver for MySQL binlog change events published to Kafka by Maxwell.
 *
 * <p>Each record value is a Maxwell JSON document describing one row change:
 * {@code table} (source table name), {@code type} (insert/update/delete),
 * {@code data} (the new column values) and — for updates only — {@code old}
 * (the prior values of the changed columns). The parsed change is handed to
 * {@link DataHandlerBroker#processingDistribution} for dispatch.
 *
 * @author zxy
 * @since 2018-12-10
 */
@Component
public class KafkaMysqlBinlogReceiver {

    private static final Logger logger = LoggerFactory.getLogger(KafkaMysqlBinlogReceiver.class);

    // NOTE(review): not referenced by any active code path in this class —
    // retained for wiring compatibility; confirm before removing.
    @Autowired
    private DataHandlerBrokerUtil dataHandlerBrokerUtil;

    @Autowired
    private DataHandlerBroker dataHandlerBroker;

    /**
     * Consumes one Maxwell binlog record from the {@code maxwell} topic and
     * forwards the parsed row change to the data-handler broker.
     *
     * <p>Records with a {@code null} value are skipped (only the offset line
     * is logged). Always logs the record's offset, key, partition and
     * timestamp after processing.
     *
     * @param foo       raw record value injected by Spring (Maxwell JSON payload);
     *                  the method reads the value via {@code record} instead
     * @param key       the Kafka record key
     * @param partition the partition the record was read from
     * @param topic     the source topic name
     * @param ts        the record timestamp (epoch millis)
     * @param record    the full consumer record; source of the payload and offset
     */
    @KafkaListener(id = "mysqlbinlog", idIsGroup = false, clientIdPrefix = "amigo", groupId = "mysqlbinlog", topics = {"maxwell"})
    public void listen(@Payload String foo,
                       @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key,
                       @Header(KafkaHeaders.RECEIVED_PARTITION_ID) int partition,
                       @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
                       @Header(KafkaHeaders.RECEIVED_TIMESTAMP) long ts, ConsumerRecord<?, ?> record) {

        Optional<?> kafkaMessage = Optional.ofNullable(record.value());
        if (kafkaMessage.isPresent()) {
            JSONObject binlog = JSON.parseObject(kafkaMessage.get().toString());
            String tableName = binlog.getString("table");
            String type = binlog.getString("type");
            JSONObject newObj = binlog.getJSONObject("data");
            // "old" is present only for UPDATE events: previous values of the changed columns.
            JSONObject oldObj = null;
            if (binlog.get("old") != null) {
                oldObj = binlog.getJSONObject("old");
            }
            dataHandlerBroker.processingDistribution(tableName, type, newObj, oldObj);

        }
        logger.info("消息偏移:{},消息key:{},消息分区:{},消息收到时间戳:{}", record.offset(), key, partition, ts);
    }

}