package server_timu;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;

/*
   Task 2: Consume the Kafka topic "ods_mall_log" with Flink and, based on the
   table-name prefix of each record, forward matching records (prefix
   "customer_login_log") to the DWD-layer Kafka topic "dim_customer_login_log".
   Records for all other tables require no processing.
 */
/**
 * Flink streaming job: reads raw log records from the ODS Kafka topic
 * {@code ods_mall_log}, keeps only records belonging to the
 * {@code customer_login_log} table (identified by their table-name prefix),
 * and writes them to the DWD Kafka topic {@code dim_customer_login_log}.
 * All other records are discarded.
 */
public class T2 {

    /** Kafka broker address, shared by source and sink. */
    private static final String BOOTSTRAP_SERVERS = "192.168.40.110:9092";

    /** Upstream ODS topic holding the raw mall log records. */
    private static final String SOURCE_TOPIC = "ods_mall_log";

    /** Downstream DWD topic for customer-login-log records. */
    private static final String SINK_TOPIC = "dim_customer_login_log";

    /**
     * Table-name prefix that identifies records destined for the sink topic.
     * BUG FIX: the original code filtered on "customer_point_log", which
     * contradicts the task (route customer_login_log records to
     * dim_customer_login_log) and forwarded the wrong table's data.
     */
    private static final String TABLE_PREFIX = "customer_login_log";

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka source: resume from committed offsets, falling back to LATEST
        // when no committed offset exists for the consumer group.
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers(BOOTSTRAP_SERVERS)
                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setGroupId("ods_mall_log_group")
                .setTopics(SOURCE_TOPIC)
                .build();

        // No event-time semantics are needed for simple routing, so no watermarks.
        DataStreamSource<String> data = env.fromSource(
                kafkaSource,
                WatermarkStrategy.noWatermarks(),
                "kafka_source"
        );

        // Keep only records whose table prefix marks them as login-log rows.
        SingleOutputStreamOperator<String> loginLogRecords =
                data.filter(record -> record.startsWith(TABLE_PREFIX));

        // Debug aid: echo forwarded records to stderr.
        loginLogRecords.printToErr();

        // Kafka sink: write the filtered records to the DWD topic.
        KafkaSink<String> kafkaSink = KafkaSink.<String>builder()
                .setBootstrapServers(BOOTSTRAP_SERVERS)
                .setRecordSerializer(
                        KafkaRecordSerializationSchema.builder()
                                .setTopic(SINK_TOPIC)
                                .setValueSerializationSchema(new SimpleStringSchema())
                                .build()
                )
                .build();

        loginLogRecords.sinkTo(kafkaSink);

        // Explicit job name makes the job identifiable in the Flink UI.
        env.execute("ods_mall_log -> dim_customer_login_log routing");
    }
}
