package com.peaksport.framework.extend.kafka;

import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.DescribeTopicsResult;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaAdmin;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

import com.peaksport.framework.base.dao.IBaseDAO;
import com.peaksport.framework.base.service.CoreBaseService;
import com.peaksport.framework.exception.PKDevException;
import com.peaksport.framework.util.PKContextUtils;
import com.peaksport.framework.util.PKJSONUtils;
import com.peaksport.framework.vo.JSONKafkaMsg;
import com.peaksport.framework.vo.SessionInfo;
import com.peaksport.pkms.base.entity.KFProduceMsgEntity;
import com.peaksport.pkms.base.venum.KFMsgTypeEnum;
import com.peaksport.pkms.oauth.entity.ClientEntity;
import com.peaksport.pkms.permit.entity.UserEntity;

import lombok.extern.slf4j.Slf4j;

/**
 * Kafka producer service.
 *
 * <p>Note: topic names may be rewritten at creation/send time — in the test
 * environment the prefix {@code "Test."} is automatically prepended to every
 * topic name (matching the consumer-side topic adjustment).
 *
 * @author jinbin_guo  2019年5月31日 上午10:40:29
 */
@Slf4j
@Service
public class PKKafkaProducer extends CoreBaseService {

	@Autowired
	private KafkaTemplate<String, Object> kafkaTemplate;

	@Autowired
	private KafkaAdmin kafkaAdmin;

	/** Whether this is the test environment (rewrites topic names when true). */
	@Value("${peak.pkms.isTest:false}")
	private boolean isTest;
	/** Default partition count used when auto-creating a topic (default 20). */
	@Value("${peak.kafka.defaultPartitionCount:20}")
	private int defaultPartitionCount;

	/**
	 * Rewrites a topic name for the current environment: in the test environment
	 * the prefix "Test." is prepended so producers and consumers stay aligned.
	 *
	 * @param topic raw topic name
	 * @return the environment-adjusted topic name
	 */
	private String newTopic(String topic) {
		return isTest ? "Test." + topic : topic;
	}

	/**
	 * Rewrites each topic name for the current environment (see {@link #newTopic}).
	 * Returns a new array — unlike the previous version, the caller's array is
	 * never mutated in place.
	 *
	 * @param topics raw topic names
	 * @return environment-adjusted topic names
	 */
	private String[] newTopics(String... topics) {
		String[] adjusted = new String[topics.length];
		for (int i = 0; i < topics.length; i++) {
			adjusted[i] = newTopic(topics[i]);
		}
		return adjusted;
	}

	/**
	 * Creates a topic.
	 *
	 * @param topic topic name (environment prefix applied automatically)
	 * @param partitionCount number of partitions
	 * @param replicationFactor replication factor; must be between 1 and the
	 *        number of brokers in the cluster
	 */
	public void createTopic(String topic, int partitionCount, short replicationFactor) {
		topic = newTopic(topic);
		// try-with-resources: AdminClient is AutoCloseable; without this every
		// call leaked client threads/sockets.
		try (AdminClient adminClient = KafkaAdminClient.create(kafkaAdmin.getConfig())) {
			NewTopic newTopic = new NewTopic(topic, partitionCount, replicationFactor);
			CreateTopicsResult createTopicsResult = adminClient.createTopics(Arrays.asList(newTopic));
			createTopicsResult.all().get();
		} catch (Exception ex) {
			// Log with the full stack trace; PKDevException only carries the message.
			log.error("创建kafka主题失败", ex);
			throw PKDevException.throwException("创建kafka主题失败: %s", ex.getMessage());
		}
	}

	/**
	 * Creates a topic with default sizing: partition count from the config key
	 * {@code peak.kafka.defaultPartitionCount} (default 20), replication factor
	 * equal to the current number of live brokers.
	 *
	 * @param topic topic name (environment prefix applied automatically)
	 */
	public void createTopic(String topic) {
		short replicationFactor = (short) getMaxReplicationFactor();
		createTopic(topic, defaultPartitionCount, replicationFactor);
	}

	/**
	 * Returns the maximum usable replication factor, i.e. the number of brokers
	 * currently reported by the cluster.
	 */
	private int getMaxReplicationFactor() {
		try (AdminClient adminClient = KafkaAdminClient.create(kafkaAdmin.getConfig())) {
			Collection<Node> nodes = adminClient.describeCluster().nodes().get();
			return nodes.size();
		} catch (Exception ex) {
			log.error("创建kafka节点数失败", ex);
			throw PKDevException.throwException("创建kafka节点数失败: %s", ex.getMessage());
		}
	}

	/**
	 * Deletes the given topics.
	 *
	 * @param topics topic names (environment prefix applied automatically)
	 * @return {@code true} when all deletions succeeded
	 */
	public boolean deleteTopic(String... topics) {
		String[] adjustedTopics = newTopics(topics);
		try (AdminClient adminClient = KafkaAdminClient.create(kafkaAdmin.getConfig())) {
			adminClient.deleteTopics(Arrays.asList(adjustedTopics)).all().get();
			return true;
		} catch (Exception ex) {
			log.error("删除kafka主题失败", ex);
			throw PKDevException.throwException("删除kafka主题失败: %s", ex.getMessage());
		}
	}

	/**
	 * Checks whether all of the given topics exist.
	 *
	 * @param topics topic names (environment prefix applied automatically)
	 * @return {@code true} when the topics are described successfully;
	 *         {@code false} when the broker reports an unknown topic/partition
	 */
	public boolean existsTopic(String... topics) {
		String[] adjustedTopics = newTopics(topics);
		try (AdminClient adminClient = KafkaAdminClient.create(kafkaAdmin.getConfig())) {
			DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Arrays.asList(adjustedTopics));
			Map<String, TopicDescription> mapTopicDescription = describeTopicsResult.all().get();
			return !mapTopicDescription.isEmpty();
		} catch (ExecutionException ex) {
			// A missing topic surfaces as UnknownTopicOrPartitionException — that
			// is the "does not exist" answer, not an error.
			if (ex.getCause() instanceof UnknownTopicOrPartitionException) return false;
			log.error("获取kafka主题失败", ex);
			throw PKDevException.throwException("获取kafka主题失败: %s", ex.getMessage());
		} catch (Exception ex) {
			log.error("获取kafka主题失败", ex);
			throw PKDevException.throwException("获取kafka主题失败: %s", ex.getMessage());
		}
	}

	/**
	 * Sends a Kafka message, creating the topic first when it does not exist
	 * (so it gets the configured partition/replication settings instead of the
	 * broker default of 1 partition / 1 replica). A {@link KFProduceMsgEntity}
	 * audit record is persisted before the send and updated with the outcome.
	 *
	 * @param topic topic name (environment prefix applied automatically)
	 * @param partition target partition, or {@code null} for broker assignment
	 * @param key message key, may be {@code null}
	 * @param data message payload
	 * @param callback optional caller callback, may be {@code null}
	 * @return the send future
	 */
	private ListenableFuture<SendResult<String, Object>> _sendMsg(String topic, Integer partition, String key, JSONKafkaMsg data, ListenableFutureCallback<SendResult<String, Object>> callback) {
		// Create the topic up front; kafkaTemplate.send would otherwise trigger
		// broker auto-creation with a single partition and replica, which does
		// not suit high-concurrency microservice use.
		if (!existsTopic(topic)) {
			createTopic(topic);
		}
		KFProduceMsgEntity kfProduceMsgEntity = new KFProduceMsgEntity();
		SessionInfo sessionInfo = PKContextUtils.getSessionInfo(pkRedisTemplate);
		Date curDate = getCurrentTime();
		kfProduceMsgEntity.setSendTime(curDate);
		ClientEntity clientEntity = sessionInfo.getClientEntity();
		kfProduceMsgEntity.setClient(clientEntity);
		UserEntity userEntity = sessionInfo.getUserEntity();
		if (userEntity == null)
			throw PKDevException.throwException("session会话用户不能为空,请联系系统管理员");
		kfProduceMsgEntity.setSender(userEntity);

		kfProduceMsgEntity.setType(KFMsgTypeEnum.JSON);
		kfProduceMsgEntity.setMsgId(data.getUniqueId());
		kfProduceMsgEntity.setTopic(data.getTopic());
		data.setClientId(clientEntity != null ? clientEntity.getId() : null);
		data.setSenderId(userEntity.getId());
		data.setSendTime(kfProduceMsgEntity.getSendTime());
		kfProduceMsgEntity.setMsgBody(PKJSONUtils.toJSON(data));
		topic = newTopic(topic);
		ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(topic, partition, key, data);
		save(kfProduceMsgEntity);
		// Single callback handles both the audit-record update and the optional
		// caller callback (previously duplicated across two anonymous classes).
		future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
			@Override
			public void onSuccess(SendResult<String, Object> sendResult) {
				String message = String.format("成功发送kafka消息[%s].", sendResult.toString());
				log.info(message);
				kfProduceMsgEntity.setSuccess(true);
				save(kfProduceMsgEntity);
				if (callback != null) callback.onSuccess(sendResult);
			}

			@Override
			public void onFailure(Throwable ex) {
				String message = String.format("发送kafka消息失败:[%s].", ex.getMessage());
				log.error(message);
				kfProduceMsgEntity.setSuccess(false);
				save(kfProduceMsgEntity);
				if (callback != null) callback.onFailure(ex);
			}
		});
		return future;
	}

	/**
	 * Sends a Kafka message synchronously (blocks until the broker acknowledges).
	 *
	 * @param topic topic name (environment prefix applied automatically)
	 * @param partition target partition, or {@code null}
	 * @param key message key, may be {@code null}
	 * @param data message payload
	 * @param callback optional caller callback, may be {@code null}
	 * @return the broker's send result
	 */
	private SendResult<String, Object> sendMsg_sync(String topic, Integer partition, String key, JSONKafkaMsg data, ListenableFutureCallback<SendResult<String, Object>> callback) {
		ListenableFuture<SendResult<String, Object>> future = _sendMsg(topic, partition, key, data, callback);
		try {
			return future.get();
		} catch (Exception ex) {
			log.error("发送kafka同步消息失败", ex);
			throw PKDevException.throwException("发送kafka同步消息失败: %s", ex.getMessage());
		}
	}

	/**
	 * Sends a Kafka message asynchronously (fire-and-forget; outcome is reported
	 * via the optional callback and the persisted audit record).
	 */
	private void sendMsg_async(String topic, Integer partition, String key, JSONKafkaMsg data, ListenableFutureCallback<SendResult<String, Object>> callback) {
		_sendMsg(topic, partition, key, data, callback);
	}

	/**
	 * Builds the JSON envelope shared by the sync and async public overloads.
	 *
	 * @param topic raw topic name (stored env-adjusted inside the envelope)
	 * @param key message key, may be {@code null}
	 * @param data payload object; its class name is recorded for deserialization
	 * @param consumeCallback consumer-side callback identifier
	 */
	private JSONKafkaMsg buildJsonKafkaMsg(String topic, String key, Object data, String consumeCallback) {
		JSONKafkaMsg jsonKafkaMsg = JSONKafkaMsg.create();
		jsonKafkaMsg.setKey(key);
		jsonKafkaMsg.setTopic(newTopic(topic));
		jsonKafkaMsg.setData(data);
		jsonKafkaMsg.setDataClassName(data.getClass().getName());
		jsonKafkaMsg.setConsumeCallback(consumeCallback);
		return jsonKafkaMsg;
	}

	/**
	 * Sends a JSON-object Kafka message synchronously.
	 *
	 * @param topic topic name (environment prefix applied automatically)
	 * @param partition target partition, or {@code null}
	 * @param key message key, may be {@code null}
	 * @param data payload object
	 * @param consumeCallback consumer-side callback identifier
	 * @param callback producer-side send callback, may be {@code null}
	 * @return the broker's send result
	 */
	public SendResult<String, Object> sendJsonMsg_sync(String topic, Integer partition, String key, Object data, String consumeCallback, ListenableFutureCallback<SendResult<String, Object>> callback) {
		JSONKafkaMsg jsonKafkaMsg = buildJsonKafkaMsg(topic, key, data, consumeCallback);
		return sendMsg_sync(topic, partition, key, jsonKafkaMsg, callback);
	}
	public SendResult<String, Object> sendJsonMsg_sync(String topic, String key, Object data, String consumeCallback, ListenableFutureCallback<SendResult<String, Object>> callback) {
		return sendJsonMsg_sync(topic, null, key, data, consumeCallback, callback);
	}
	public SendResult<String, Object> sendJsonMsg_sync(String topic, String key, Object data, String consumeCallback) {
		return sendJsonMsg_sync(topic, null, key, data, consumeCallback, null);
	}

	public SendResult<String, Object> sendJsonMsg_sync(String topic, int partition, Object data, String consumeCallback, ListenableFutureCallback<SendResult<String, Object>> callback) {
		return sendJsonMsg_sync(topic, partition, null, data, consumeCallback, callback);
	}
	public SendResult<String, Object> sendJsonMsg_sync(String topic, int partition, Object data, String consumeCallback) {
		return sendJsonMsg_sync(topic, partition, null, data, consumeCallback, null);
	}

	public SendResult<String, Object> sendJsonMsg_sync(String topic, Object data, String consumeCallback, ListenableFutureCallback<SendResult<String, Object>> callback) {
		return sendJsonMsg_sync(topic, null, null, data, consumeCallback, callback);
	}
	public SendResult<String, Object> sendJsonMsg_sync(String topic, Object data, String consumeCallback) {
		return sendJsonMsg_sync(topic, null, null, data, consumeCallback, null);
	}

	/**
	 * Sends a JSON-object Kafka message asynchronously.
	 *
	 * @param topic topic name (environment prefix applied automatically)
	 * @param partition target partition, or {@code null}
	 * @param key message key, may be {@code null}
	 * @param data payload object
	 * @param consumeCallback consumer-side callback identifier
	 * @param callback producer-side send callback, may be {@code null}
	 */
	public void sendJsonMsg_async(String topic, Integer partition, String key, Object data, String consumeCallback, ListenableFutureCallback<SendResult<String, Object>> callback) {
		JSONKafkaMsg jsonKafkaMsg = buildJsonKafkaMsg(topic, key, data, consumeCallback);
		sendMsg_async(topic, partition, key, jsonKafkaMsg, callback);
	}
	public void sendJsonMsg_async(String topic, String key, Object data, String consumeCallback, ListenableFutureCallback<SendResult<String, Object>> callback) {
		sendJsonMsg_async(topic, null, key, data, consumeCallback, callback);
	}
	public void sendJsonMsg_async(String topic, String key, Object data, String consumeCallback) {
		sendJsonMsg_async(topic, null, key, data, consumeCallback, null);
	}

	public void sendJsonMsg_async(String topic, int partition, Object data, String consumeCallback, ListenableFutureCallback<SendResult<String, Object>> callback) {
		sendJsonMsg_async(topic, partition, null, data, consumeCallback, callback);
	}
	public void sendJsonMsg_async(String topic, int partition, Object data, String consumeCallback) {
		sendJsonMsg_async(topic, partition, null, data, consumeCallback, null);
	}

	public void sendJsonMsg_async(String topic, Object data, String consumeCallback, ListenableFutureCallback<SendResult<String, Object>> callback) {
		sendJsonMsg_async(topic, null, null, data, consumeCallback, callback);
	}
	/** Overload consistent with the {@code _async} naming of its siblings. */
	public void sendJsonMsg_async(String topic, Object data, String consumeCallback) {
		sendJsonMsg_async(topic, null, null, data, consumeCallback, null);
	}
	/** Kept for backward compatibility; delegates to the async send. */
	public void sendJsonMsg(String topic, Object data, String consumeCallback) {
		sendJsonMsg_async(topic, null, null, data, consumeCallback, null);
	}

	@Override
	public IBaseDAO<?> getDAO() { return null; }
}
