package com.deepglint.sink;

import com.deepglint.beans.SensorReading;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;

import java.util.Properties;

/**
 * @author mj
 * @version 1.0
 * @date 2021-11-16 21:19
 */
public class SinkTest_Kafka {

    /**
     * Demonstrates Flink's Kafka connectors: consumes raw sensor lines from
     * the {@code sensor} topic and writes them unchanged to the
     * {@code finkSinkTest} topic on the same broker.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // A single parallel task keeps record ordering simple for this demo.
        env.setParallelism(1);

        // Kafka consumer configuration. group.id is required so the broker
        // can track this consumer group's offsets.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.150.128:9092");
        properties.setProperty("group.id", "sink-test-kafka");

        // Read data from Kafka as plain strings.
        DataStream<String> dataStream =
                env.addSource(new FlinkKafkaConsumer<>("sensor",
                        new SimpleStringSchema(), properties));

        // Write the stream back into Kafka on a different topic.
        dataStream.addSink(new FlinkKafkaProducer<>(
                "192.168.150.128:9092", "finkSinkTest", new SimpleStringSchema()));

        // Name the job so it is identifiable in the Flink dashboard.
        env.execute("SinkTest_Kafka");
    }
}
