package com.chencong.online.utils;

import com.chencong.online.common.ConfigCommon;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Kafka utility class: builds Flink Kafka sources/sinks for String records
 * and can publish a local text file to a Kafka topic line by line.
 *
 * @author chencong
 * @since 2021/12/19
 */
public class KafkaUtil {

    /** Base Kafka connection settings shared (read-only) by every factory method. */
    private static final Properties BASE_PROPERTIES = new Properties();

    static {
        BASE_PROPERTIES.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, ConfigCommon.TEST239_KAFKA_SERVER);
    }

    /** Utility class — not meant to be instantiated. */
    private KafkaUtil() {
    }

    /**
     * Returns a fresh copy of the base properties so each factory method can
     * add its own settings without polluting shared state. (The original code
     * mutated one static Properties object from every method, so consumer
     * settings such as group.id and the producer serializer settings leaked
     * into every subsequently created client.)
     */
    private static Properties baseProperties() {
        Properties copy = new Properties();
        copy.putAll(BASE_PROPERTIES);
        return copy;
    }

    /**
     * Creates a Flink Kafka producer (sink) that writes String records.
     *
     * @param topic target Kafka topic
     * @return a FlinkKafkaProducer serializing each record as a String
     */
    public static FlinkKafkaProducer<String> getFlinkKafkaProducer(String topic) {
        return new FlinkKafkaProducer<String>(topic
                , new SimpleStringSchema() // serialize records as String
                , baseProperties()
        );
    }

    /**
     * Creates a Flink Kafka consumer (source) that reads String records.
     *
     * @param topic          Kafka topic to consume
     * @param groupId        consumer group id
     * @param offsetStrategy value for auto.offset.reset (e.g. "earliest", "latest")
     * @return a FlinkKafkaConsumer deserializing each record as a String
     */
    public static FlinkKafkaConsumerBase<String> getFlinkKafkaConsumer(String topic, String groupId, String offsetStrategy) {
        // Work on a private copy: consumer settings must not bleed into
        // producers created from the same base properties.
        Properties consumerProps = baseProperties();
        consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        consumerProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, offsetStrategy);
        // Auto-commit offsets to Kafka. NOTE(review): with Flink checkpointing
        // enabled the connector manages offsets itself — confirm this is intended.
        consumerProps.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        // Allow fetching up to 8 MiB per partition per request.
        consumerProps.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, "8388608");

        return new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), consumerProps);
    }

    /**
     * Reads a local text file line by line and publishes each line as a
     * record to the given Kafka topic.
     *
     * @param topic target Kafka topic
     * @param path  path of the local text file to read (decoded as UTF-8)
     * @throws IOException if the file cannot be opened or read
     */
    public static void getWriteToKafka(String topic, String path) throws IOException {
        // Private copy so the String serializers do not leak into the
        // shared base properties used by the Flink factories.
        Properties producerProps = baseProperties();
        producerProps.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources: the original leaked both the reader and the
        // producer when an I/O error occurred mid-read. Explicit UTF-8
        // replaces the platform-default charset of the old FileReader.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(producerProps);
             BufferedReader bufferedReader = new BufferedReader(
                     new InputStreamReader(new FileInputStream(path), StandardCharsets.UTF_8))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                // Asynchronous send; delivery is forced by flush() below.
                kafkaProducer.send(new ProducerRecord<>(topic, line));
            }
            // Ensure all buffered records are delivered before the producer closes.
            kafkaProducer.flush();
        }
    }

}
