package com.atguigu.flink.demo04;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SocketTextStreamFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducerBase;
import org.apache.flink.streaming.connectors.kafka.internals.KeyedSerializationSchemaWrapper;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkFixedPartitioner;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;

import java.util.Properties;

/**
 * Demo job: reads text lines from a socket with Flink and writes them to Kafka.
 *
 * @author admin
 * @date 2021/8/10
 */
public class KafkaSink {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        /*
         * Read data from a network socket.
         * Arguments: host, port, line delimiter, max retries on connection loss.
         */
        SocketTextStreamFunction socketText = new SocketTextStreamFunction("hadoop102", 9999, "\n", 3);
        DataStreamSource<String> source = env.addSource(socketText);

        // Debug print with a prefix so the records are identifiable in stdout.
        source.print("kafka>>>");

        // Kafka broker list and destination topic.
        String brokerList = "hadoop102:9092,hadoop103:9092,hadoop104:9092";
        String topicId = "flink_kafka";

        // Serialize each record as a plain UTF-8 string.
        FlinkKafkaProducer<String> kafkaSink = new FlinkKafkaProducer<>(brokerList, topicId, new SimpleStringSchema());

        // NOTE: never call kafkaSink.invoke(...) manually. invoke() is a
        // SinkFunction lifecycle method driven by the Flink runtime after
        // open() has initialized the internal KafkaProducer; invoking it on a
        // freshly constructed sink fails because the producer is still null.

        // Attach the sink so every socket line is published to the topic.
        source.addSink(kafkaSink);

        // Trigger lazy execution of the job graph.
        env.execute();
    }

}
