package com.atguigu.day06;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

//sink to kafka: read the file line by line and write each record to Kafka
public class Example7 {
    /**
     * Reads a CSV file line by line and writes each record as a String to the
     * Kafka topic {@code userbehavior-0701} via {@link FlinkKafkaProducer}.
     *
     * @param args optional; {@code args[0]} overrides the default input file path
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // parallelism 1 preserves the file's record order in the Kafka topic
        env.setParallelism(1);

        Properties props = new Properties();
        // setProperty enforces String key/value, unlike the raw Hashtable put()
        props.setProperty("bootstrap.servers", "hadoop102:9092");

        // allow overriding the hard-coded local path from the command line
        String inputPath = args.length > 0
                ? args[0]
                : "D:\\code\\git\\yanzl_PC\\flinktutorial0701\\src\\main\\resources\\UserBehavior.csv";

        env
                .readTextFile(inputPath)
                .addSink(
                        new FlinkKafkaProducer<String>(
                                "userbehavior-0701",
                                new SimpleStringSchema(),
                                props
                        )
                );

        // named job for easier identification in the Flink web UI
        env.execute("file-to-kafka-sink");
    }
}
