package mq.cinrad.kafka.app;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import mq.cinrad.kafka.app.config.KafkaWatchFileConfig;
import mq.cinrad.kafka.app.producer.DirWatch;
import mq.cinrad.kafka.app.producer.FileActionCallback;
import mq.cinrad.kafka.app.producer.FileActionCallbackFactory;
import mq.cinrad.kafka.app.producer.KafkaWatchFileFilter;

@SpringBootApplication
public class CinradProducer implements CommandLineRunner {

	private static final Logger logger = LoggerFactory.getLogger(CinradProducer.class);

	public static void main(String[] args) {
		SpringApplication.run(CinradProducer.class, args);
	}

	/**
	 * Configuration describing which directory/files to watch and which Kafka
	 * cluster/topic to publish to. Injected by Spring; a setter is provided for
	 * tests.
	 */
	@Autowired
	private KafkaWatchFileConfig watchConfig;

	/**
	 * Application entry point after the Spring context is up: builds a
	 * {@link KafkaTemplate} from the watch configuration and starts a single
	 * background thread running {@link DirWatch}, which forwards file events to
	 * Kafka via the configured {@link FileActionCallback}.
	 *
	 * @param args command-line arguments (unused)
	 * @throws Exception if template creation or watcher startup fails
	 */
	@Override
	public void run(String... args) throws Exception {

		if (watchConfig == null) {
			logger.error("WatchConfig is null!");
			return;
		}

		KafkaTemplate<String, String> template = createTemplate(watchConfig);
		template.setDefaultTopic(watchConfig.getDefaultTopic());

		// The watcher loops indefinitely, so a single dedicated thread is enough;
		// it lives for the lifetime of the application.
		ExecutorService executorService = Executors.newSingleThreadExecutor();

		FileActionCallback callback = FileActionCallbackFactory.getFileActionCallback(template,
				new KafkaWatchFileFilter(watchConfig));
		DirWatch watch = new DirWatch(callback);
		executorService.execute(watch);

		// Best-effort cleanup of the watcher thread when the JVM exits.
		Runtime.getRuntime().addShutdownHook(new Thread(executorService::shutdownNow));
	}

	public KafkaWatchFileConfig getWatchConfig() {
		return watchConfig;
	}

	public void setWatchConfig(KafkaWatchFileConfig watchConfig) {
		this.watchConfig = watchConfig;
	}

	/**
	 * Creates a String/String {@link KafkaTemplate} backed by a
	 * {@link DefaultKafkaProducerFactory} configured from {@code config}.
	 *
	 * @param config watch configuration supplying the bootstrap servers
	 * @return a ready-to-use template (default topic is set by the caller)
	 */
	private KafkaTemplate<String, String> createTemplate(KafkaWatchFileConfig config) {
		Map<String, Object> senderProps = producerProps(config);
		ProducerFactory<String, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
		return new KafkaTemplate<>(pf);
	}

	/**
	 * Builds the Kafka producer properties.
	 *
	 * @param config watch configuration supplying the bootstrap servers
	 * @return mutable property map for {@link DefaultKafkaProducerFactory}
	 */
	private Map<String, Object> producerProps(KafkaWatchFileConfig config) {
		Map<String, Object> props = new HashMap<>();
		props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getBootstrapServer());
		// No retries: failed sends are dropped rather than re-ordered/duplicated.
		props.put(ProducerConfig.RETRIES_CONFIG, 0);
		// Small batches (2 KiB) with a 1 ms linger favour low latency over throughput.
		props.put(ProducerConfig.BATCH_SIZE_CONFIG, 2048);
		props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
		// 32 MiB total buffer memory (the Kafka client default).
		props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
		props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		return props;
	}

}
