package com.zlm.util;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Author: Harbour
 * Date: 2021-05-13 21:59
 * Desc: Kafka helpers — produces test data into a topic and builds Flink Kafka source streams.
 */
public class MyKafkaUtils {

    /** Utility class — all members are static; not meant to be instantiated. */
    private MyKafkaUtils() {
    }

    public static void main(String[] args) throws Exception {
        String path = "D:\\JavaProject\\real-time-data-warehouse\\user-behavior-analysis\\src\\main\\resources\\apache.log";
        MyKafkaUtils.produceData("net-flow", path);
    }

    /**
     * Reads the file at {@code path} line by line and sends each line as the
     * value of a String record to the given Kafka topic.
     *
     * @param topic Kafka topic to write to
     * @param path  path of the source file, one record per line (read as UTF-8)
     * @throws Exception if the file cannot be read or the producer fails
     */
    public static void produceData(String topic, String path) throws Exception {
        // try-with-resources guarantees both the producer and the reader are
        // closed even if reading/sending throws (the original leaked the reader
        // always, and the producer on any failure). Closing the producer also
        // flushes any buffered, unsent records.
        try (KafkaProducer<String, String> kafkaProducer =
                     new KafkaProducer<>(MyKafkaUtils.getProperties());
             BufferedReader bufferedReader = new BufferedReader(
                     // explicit UTF-8 instead of FileReader's platform default charset
                     new InputStreamReader(new FileInputStream(path), StandardCharsets.UTF_8))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                kafkaProducer.send(new ProducerRecord<>(topic, line));
            }
        }
    }

    /**
     * Creates a Flink source stream that consumes the given Kafka topic using
     * the configuration loaded by {@link #getProperties()}.
     *
     * @param env    the Flink streaming execution environment
     * @param topic  Kafka topic to consume
     * @param schema deserializer turning Kafka records into elements of type {@code T}
     * @param <T>    element type of the resulting stream
     * @return a {@link DataStream} backed by a {@link FlinkKafkaConsumer}
     */
    public static <T> DataStream<T> getKafkaInputStream(StreamExecutionEnvironment env, String topic, DeserializationSchema<T> schema) {
        Properties prop = MyKafkaUtils.getProperties();
        return env.addSource(new FlinkKafkaConsumer<>(topic, schema, prop));
    }

    /**
     * Loads Kafka configuration (bootstrap.servers, key/value deserializers,
     * auto.offset.reset, ...) from {@code application.properties} on the classpath.
     *
     * @return the loaded properties; empty if the resource exists but cannot be read
     * @throws IllegalStateException if {@code application.properties} is not on the classpath
     */
    public static Properties getProperties() {
        Properties prop = new Properties();
        // try-with-resources closes the stream (the original never closed it).
        try (InputStream is = ClassLoader.getSystemClassLoader().getResourceAsStream("application.properties")) {
            if (is == null) {
                // Fail fast with a clear message instead of the bare NPE that
                // prop.load(null) would otherwise throw.
                throw new IllegalStateException("application.properties not found on classpath");
            }
            prop.load(is);
        } catch (IOException e) {
            // Preserve the original best-effort behavior on read errors:
            // report and return whatever was loaded so far.
            e.printStackTrace();
        }
        return prop;
    }
}
