package com.yuninglong.kafka_demo1;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
//import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
//import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;

/**
 * @author yuninglong
 */
public class SocketToKafka {

    /** Default socket source host, used when no CLI argument is supplied. */
    private static final String DEFAULT_HOST = "192.168.202.10";

    /** Default socket source port, used when no CLI argument is supplied. */
    private static final int DEFAULT_PORT = 9000;

    public static void main(String[] args) throws Exception {
        new SocketToKafka().execute(args);
    }

    /**
     * Builds and submits the streaming job:
     * <ol>
     *   <li>initialize the Flink stream execution environment</li>
     *   <li>set parallelism to 1</li>
     *   <li>read lines from a socket source</li>
     *   <li>pass each record through an (identity) map transformation</li>
     *   <li>write to Kafka — currently disabled, records are printed instead</li>
     *   <li>submit the job</li>
     * </ol>
     *
     * @param args optional overrides: {@code args[0]} = source host,
     *             {@code args[1]} = source port (previously hard-coded)
     * @throws Exception if job submission/execution fails, or
     *         {@link NumberFormatException} if the port argument is not numeric
     */
    private void execute(String[] args) throws Exception {
        // 1) Initialize the Flink stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2) A single parallel instance keeps console output in arrival order.
        env.setParallelism(1);

        // Host/port were hard-coded; they are now overridable from the command
        // line while keeping the original values as defaults, so existing
        // invocations behave exactly as before.
        String host = args.length > 0 ? args[0] : DEFAULT_HOST;
        int port = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_PORT;

        // 3) Socket text source: emits one String record per line received.
        DataStreamSource<String> orderDataStreamSource = env.socketTextStream(host, port);

        // 4) Identity transformation — deliberate extension point where
        //    per-record parsing/enrichment would go before the sink.
        SingleOutputStreamOperator<String> outputStreamOperator =
                orderDataStreamSource.map(new MapFunction<String, String>() {
                    @Override
                    public String map(String order) throws Exception {
                        return order;
                    }
                });

        // 5) Kafka sink is currently disabled; re-enable by uncommenting the
        //    KafkaSink/KafkaRecordSerializationSchema imports and the block below.
//        KafkaSink<String> kafkaProducer = KafkaSink.<String>builder()
//                .setBootstrapServers("192.168.202.10:9092")
//                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
//                        .setTopic("testTopic")
//                        .setValueSerializationSchema(new SimpleStringSchema())
//                        .build()
//                ).build();
//        outputStreamOperator.sinkTo(kafkaProducer);

        // Print sink stands in for the Kafka sink while it is disabled.
        outputStreamOperator.print();

        // 6) Submit the job for execution.
        env.execute();
    }

}
