package com.intellif.mozping.util;


import com.intellif.mozping.producer.KafkaProducerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Properties;

@Component
public class TaskUtil {

    /**
     * Property keys shared by both overloads. The file-based overload reads each
     * one from the source file under the {@code cluster.} prefix and writes it
     * into the returned Properties under the bare key.
     */
    private static final String[] KAFKA_KEYS = {
            "topicName", "topicFileName", "bootstrap.servers", "acks", "retries",
            "batch.size", "linger.ms", "buffer.memory", "key.serializer", "value.serializer"
    };

    @Autowired
    KafkaProducerConfig producerConfig;

    /**
     * Loads Kafka producer configuration from a properties file, remapping each
     * {@code cluster.<key>} entry to its bare {@code <key>} name.
     *
     * @param path filesystem path of the source properties file; must be non-null and non-empty
     * @return the populated {@link Properties}, or {@code null} if the file could not be
     *         read or any required {@code cluster.*} key is missing (original contract:
     *         all load failures are swallowed and reported as {@code null})
     * @throws IllegalArgumentException if {@code path} is {@code null} or empty
     */
    public static Properties getKafkaConf(String path) {
        if (null == path || "".equals(path)) {
            // Fixed message: original concatenated the path with no separator and
            // omitted the empty-string case this guard also covers.
            throw new IllegalArgumentException("Properties file path can not be null or empty: " + path);
        }
        // try-with-resources replaces the manual finally-close; the stream is
        // closed even when load() or a missing key aborts the copy.
        try (InputStream inputStream = new FileInputStream(new File(path))) {
            Properties source = new Properties();
            source.load(inputStream);

            Properties kafkaProperties = new Properties();
            for (String key : KAFKA_KEYS) {
                String value = source.getProperty("cluster." + key);
                if (value == null) {
                    // Previously a missing key surfaced as an anonymous NPE from
                    // setProperty; fail with a message naming the offending key.
                    // Caught below, so the external null-on-failure contract holds.
                    throw new IllegalStateException("Missing required property: cluster." + key);
                }
                kafkaProperties.setProperty(key, value);
            }
            return kafkaProperties;
        } catch (Exception e) {
            // Preserve original behavior: any failure is logged and null returned.
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Builds Kafka producer configuration from the Spring-injected
     * {@link KafkaProducerConfig} bean instead of a file.
     *
     * <p>Note: unlike the file-based overload, this variant does not set
     * {@code topicFileName} (it was already commented out in the original).
     *
     * @return the populated {@link Properties}; never {@code null}
     */
    public Properties getKafkaConf() {
        Properties kafkaProperties = new Properties();
        kafkaProperties.setProperty("topicName", producerConfig.getTopics());
        kafkaProperties.setProperty("bootstrap.servers", producerConfig.getBootstrapServers());
        kafkaProperties.setProperty("acks", producerConfig.getAcks());
        kafkaProperties.setProperty("retries", producerConfig.getRetries());

        kafkaProperties.setProperty("batch.size", producerConfig.getBatchSize());
        kafkaProperties.setProperty("linger.ms", producerConfig.getLingerMs());
        kafkaProperties.setProperty("buffer.memory", producerConfig.getBufferMemory());
        kafkaProperties.setProperty("key.serializer", producerConfig.getKeySerializer());
        kafkaProperties.setProperty("value.serializer", producerConfig.getValueSerializer());
        return kafkaProperties;
    }
}
