package app.socket.client;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Arrays;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import app.htby.utils.KafkaConsumeUtils;
import app.htby.utils.SocketUtils;
import app.htby.utils.SpringContextUtil;
import app.kafka.consume.DisWebConsumer;
import app.kafka.consume.DisWebConsumerRunnable;

/**
 * Asynchronous socket client for the distribution system: receives a
 * "&amp;&amp;"-delimited topic list and spawns a Kafka consumer thread per topic.
 * 
 * @author lfy.xys
 * @date 2018年5月22日
 *
 */
public class DisConnClientRunnableTest implements Runnable {

	private static final Logger logger = LoggerFactory.getLogger(DisConnClientRunnableTest.class);


	/**
	 * Entry point: uses a fixed test message ("topic_1&amp;&amp;topic_2") as the
	 * topic list received from the server and creates consumers for it.
	 */
	@Override
	public void run() {
		String msgStr = "topic_1&&topic_2";
		logger.info("客户端收到的信息：{}" ,msgStr);
		CreateKafka(msgStr);// 创建消费者 (create the consumers)
	}

	/**
	 * Reconnects the client after the server connection is lost, waiting
	 * {@code SocketUtils.NIO_RECONNECT_INTERVAL_TIME} ms before retrying.
	 * If the waiting thread is interrupted, the interrupt flag is restored
	 * and the reconnect attempt is aborted.
	 * 
	 * @author lfy.xys
	 * @date 2018年5月30日
	 *
	 */
	public void reconnect() {
		logger.info("服务端连接失败，{}ms后进行重连...", SocketUtils.NIO_RECONNECT_INTERVAL_TIME);
		try {
			Thread.sleep(SocketUtils.NIO_RECONNECT_INTERVAL_TIME);
		} catch (InterruptedException e) {
			// Restore the interrupt status and abort instead of swallowing the
			// interruption (the original printStackTrace() lost both signals).
			Thread.currentThread().interrupt();
			return;
		}
		this.run();
	}

	/**
	 * Splits the incoming message on "&amp;&amp;" and starts one consumer thread
	 * per topic that has not been consumed yet. Each consumer is registered as a
	 * Spring bean before its thread is started.
	 * <p>
	 * NOTE: method name kept as {@code CreateKafka} (not lowerCamelCase) for
	 * backward compatibility with existing callers.
	 * 
	 * @param msgStr "&amp;&amp;"-delimited list of topic names
	 */
	public void CreateKafka(String msgStr) {
		// Split on the literal "&&" delimiter ('&' is not a regex metacharacter).
		String[] msgs = msgStr.split("&&");
		logger.info("消费的topic集合：{}",Arrays.toString(msgs));
		for (String topic : msgs) {

			logger.info("消费的topic：{}",topic);
			// Skip topics that already have a consumer.
			if(!KafkaConsumeUtils.topicSet.contains(topic)){
				String beanId = "Consumer_" + KafkaConsumeUtils.topicCount;//设置id (build the bean id)
				// Record the topic so a later call does not spawn a duplicate
				// consumer — the original never added to topicSet, so the
				// contains() guard above could never trigger.
				KafkaConsumeUtils.topicSet.add(topic);
				// NOTE(review): topicCount is never incremented in this class,
				// so successive beanIds may collide — confirm who bumps it.
				SpringContextUtil.registerBean(beanId, DisWebConsumer.class.getName());// 使用spring创建bean (register bean via Spring)
				// Consume on a dedicated thread.
				new Thread(new DisWebConsumerRunnable(topic,beanId)).start();
			}
		}

	}

}
