package org.dromara.solonplugins.kafka1;

import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.noear.solon.Utils;
import org.noear.solon.core.AppContext;
import org.noear.solon.core.Props;

/**
 * Manages the lifecycle of Kafka client instances configured under the
 * {@code solon.kafka1.*} property prefix: builds a {@link KafkaProducer}
 * from configuration and closes any open clients on {@link #stop()}.
 */
public class Kafka1Manager {

	/** Root prefix of all plugin configuration keys. */
	private static final String TAG = "solon.kafka1.";

	private static final String ATTR_CONSUMER = "consumer";
	private static final String ATTR_PRODUCER = "producer";

	// NOTE(review): this field is never assigned anywhere in this class —
	// presumably it is set by code elsewhere, or consumer support is still
	// pending implementation. Confirm before relying on stop() closing it.
	private KafkaConsumer<String, String> consumer;

	/**
	 * Builds a {@link KafkaProducer} from the {@code solon.kafka1.producer.*}
	 * configuration bound via {@link Kafka1ProducerProperties}.
	 *
	 * <p>Side effect: removes the {@code solon.kafka1.producer} entry from
	 * {@code props} after the producer has been created.
	 *
	 * @param props application properties containing the producer configuration
	 * @return a new, ready-to-use producer (caller owns closing it)
	 */
	protected KafkaProducer<String, String> getProducer(Props props) {
		Kafka1ProducerProperties kp = props.getBean(TAG + ATTR_PRODUCER, Kafka1ProducerProperties.class);

		// buffer.memory may be given with a unit suffix, e.g. "32mb".
		String tmp = kp.getBufferMemory();
		if (Utils.isNotBlank(tmp)) {
			// Locale.ROOT keeps the unit-suffix match locale-independent
			// (avoids the Turkish dotless-i problem of the default locale).
			tmp = tmp.trim().toLowerCase(Locale.ROOT);
		}
		long bufferMemory = getSize(tmp, 0);

		Properties properties = new Properties();

		properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, String.join(",", kp.getBootstrapServers()));
		properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, kp.getKeySerializer().getName());
		properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, kp.getValueSerializer().getName());
		// Acknowledgement level for sends
		properties.put(ProducerConfig.ACKS_CONFIG, kp.getAcks());
		// Retry count on transient send failures
		properties.put(ProducerConfig.RETRIES_CONFIG, kp.getRetries());
		properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
		properties.put(ProducerConfig.BATCH_SIZE_CONFIG, kp.getBatchSize()); // Kafka default is 16384 bytes (16 KB)

		// Apply user-defined custom properties last, so they can override the above.
		Map<String, String> map = kp.getProperties();
		if (map != null && !map.isEmpty()) {
			map.forEach(properties::put);
		}

		props.remove(TAG + ATTR_PRODUCER); // release the kafka1 producer config after use

		return new KafkaProducer<>(properties);
	}

	/**
	 * Parses a size string into a number of bytes.
	 *
	 * <p>Accepted forms (already lower-cased and trimmed by the caller):
	 * {@code "1gb"}, {@code "16mb"}, {@code "512kb"}, or a plain integer such
	 * as {@code "-1"}. Uses binary units (1 kb = 1024 bytes).
	 *
	 * <p>Bug fix: the unit branches previously multiplied by 1204 instead of
	 * 1024, producing wrong byte counts for every suffixed value.
	 *
	 * @param tmp the size string; may be {@code null} or empty
	 * @param def value returned when {@code tmp} is {@code null} or empty
	 * @return the size in bytes, or {@code def}
	 */
	private static long getSize(String tmp, long def) {
		if (tmp == null) {
			return def;
		}

		if (tmp.endsWith("gb")) {
			long val = Long.parseLong(tmp.substring(0, tmp.length() - 2));
			return val * 1024 * 1024 * 1024;
		} else if (tmp.endsWith("mb")) {
			long val = Long.parseLong(tmp.substring(0, tmp.length() - 2));
			return val * 1024 * 1024;
		} else if (tmp.endsWith("kb")) {
			long val = Long.parseLong(tmp.substring(0, tmp.length() - 2));
			return val * 1024;
		} else if (tmp.length() > 0) {
			return Long.parseLong(tmp); // plain number; negative values such as -1 are allowed
		} else {
			return def; // empty string: fall back to the default (0 = unset)
		}
	}

	/**
	 * Closes any open Kafka clients. Safe to call when nothing was started.
	 */
	protected void stop() {
		if (consumer != null) {
			consumer.close(); // close the consumer
		}
	}

}
