package com.xiguthings.xiniu.iot.device.state.kafka;

import javax.annotation.PostConstruct;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import com.xiguthings.iot.kafka.KafkaClienAstract;
import com.xiguthings.iot.kafka.consumer.Consumer;
import com.xiguthings.iot.kafka.consumer.base.KafkaConsumerConfig;
import com.xiguthings.iot.kafka.producer.DataProducer;
import com.xiguthings.iot.kafka.producer.base.KafkaProducerConfig;
import com.xiguthings.xiniu.iot.device.state.controller.DataCenter;

@Component
public class KafkaClient extends KafkaClienAstract {

	// SLF4J convention: one static final logger per class.
	private static final Logger LOGGER = LoggerFactory.getLogger(KafkaClient.class);

	/**
	 * Downstream message handler; setter-injected (see {@link #setDataCenter}),
	 * so it may be {@code null} until the caller wires it in.
	 */
	private DataCenter dataCenter;

	/** Producer created once in {@link #init()} and reused for all sends. */
	private DataProducer dataProducer;

	// ---- Consumer configuration (injected from application properties) ----
	@Value("${kafka.consumer.topic}")
	private String topic;
	@Value("${kafka.consumer.group-id}")
	private String groupId;
	// NOTE(review): these fields are public and part of the visible interface;
	// kept public and unrenamed for backward compatibility. Prefer private.
	@Value("${kafka_consumer_bootstrap_servers}")
	public String consumerServers;
	@Value("${kafka.enable.auto.commit}")
	public boolean enableAutoCommit;
	@Value("${kafka.session.timeout.ms}")
	public String sessionTimeout;
	@Value("${kafka.auto.commit.interval.ms}")
	public String autoCommitInterval;
	@Value("${kafka.auto.offset.reset}")
	public String autoOffsetReset;

	// ---- Producer configuration (injected from application properties) ----
	@Value("${kafka.producer.acks}")
	public String acks;
	@Value("${kafka_producer_bootstrap_servers}")
	public String producerServers;
	@Value("${kafka.producer.retries}")
	public int retries;
	// TODO(review): "bufferMemorry" / "bacthSize" are misspelled but public —
	// renaming would break external callers, so the names are left as-is.
	@Value("${kafka.producer.buffer.memory}")
	public long bufferMemorry;
	@Value("${kafka.producer.batch.size}")
	public int bacthSize;
	@Value("${kafka.producer.linger.ms}")
	public long lingerMs;

	/**
	 * Wires in the component that consumed messages are dispatched to.
	 * Must be called before messages start arriving, otherwise
	 * {@link #handlerMessage} will drop them (with a warning).
	 *
	 * @param dataCenter the message sink; not null
	 */
	public void setDataCenter(DataCenter dataCenter) {
		this.dataCenter = dataCenter;
	}

	/**
	 * Creates the shared producer once all {@code @Value} fields are injected.
	 */
	@PostConstruct
	private void init() {
		// Create the producer
		KafkaProducerConfig kafkaProducerConfig = new KafkaProducerConfig(acks, producerServers, retries, bufferMemorry,
				bacthSize, lingerMs);
		this.dataProducer = new DataProducer(kafkaProducerConfig);
	}

	/**
	 * Builds a consumer thread task; running it starts consuming automatically.
	 *
	 * @return a runnable that consumes {@code topic} and calls back into this client
	 */
	public ConsumerRunnable getConsumerRunnable() {
		LOGGER.info("kafka客户端启动了");
		KafkaConsumerConfig kafkaConsumerConfig = new KafkaConsumerConfig(consumerServers, enableAutoCommit,
				sessionTimeout, autoCommitInterval, autoOffsetReset);
		// Create the consumer
		Consumer consumer = new Consumer(topic, groupId, kafkaConsumerConfig);
		return new ConsumerRunnable(this, consumer);
	}

	/**
	 * Invoked automatically whenever any consumer receives a message.
	 * Exceptions from the handler are logged (with context) rather than
	 * propagated, so one bad message does not kill the consumer loop.
	 */
	@Override
	public void handlerMessage(Consumer consumer, String dataStr) {
		if (dataCenter == null) {
			// Setter injection has not happened yet; drop the message but say so.
			LOGGER.warn("dataCenter not set yet, dropping kafka message: {}", dataStr);
			return;
		}
		try {
			dataCenter.receiveMessage(dataStr);
		} catch (Exception e) {
			// Previously logged with an empty message — include the payload for diagnosis.
			LOGGER.error("failed to handle kafka message: {}", dataStr, e);
		}
	}

	/**
	 * Sends a message via the shared producer created in {@link #init()}.
	 *
	 * @param value     the payload to send
	 * @param topic     destination topic
	 * @param partition target partition, or null to let Kafka choose
	 */
	public void produceData(String value, String topic, Integer partition) {
		super.produceData(dataProducer, value, topic, partition);
	}
}
