package com.haoziqi.chapter_05.Sink;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.kafka.clients.producer.KafkaProducer;

import java.util.Properties;

/**
 * Flink streaming job that reads text lines from a socket source and writes
 * them to a Kafka topic via {@link org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer}.
 * created by A on 2021/3/11
 */
public class KafkaSink {
    /**
     * Entry point: builds a socket -> Kafka pipeline and submits it.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute; letting it
     *                   propagate (instead of printStackTrace) makes the JVM
     *                   exit non-zero so failures are visible to the caller.
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the streaming environment; parallelism 1 keeps the
        //    example's output ordering simple to reason about.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: one record per line read from the socket.
        // NOTE(review): the socket host ("hadoop102") and the Kafka brokers
        // ("hadoop1..hadoop3") look like different clusters — confirm intended.
        DataStreamSource<String> inputDS = env.socketTextStream("hadoop102", 9999);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop1:9092,hadoop2:9092,hadoop3:9092");

        // Sink: arg 1 is the topic id (Kafka auto-creates it if missing),
        // arg 2 the record serializer, arg 3 the Kafka producer config.
        FlinkKafkaProducer<String> kafkaSink = new FlinkKafkaProducer<>(
                "flink01",
                new SimpleStringSchema(),
                properties);
        inputDS.addSink(kafkaSink);

        // Submit the job under an explicit name (shows up in the Flink UI).
        env.execute("KafkaSink");
    }

}
