package com.chenzhiling.study;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

/**
 * Author: CHEN ZHI LING
 * Date: 2022/11/18
 * Description: Flink streaming job that reads JSON records from a Kafka topic,
 * converts each record to Flink RowData via {@code FlinkDeltaUtil.kafkaJsonToRowData},
 * and writes the rows to a Delta Lake table (unpartitioned) through
 * {@code FlinkDeltaUtil.createDeltaSink}.
 */
public class KafkaFlinkDelta {

    /** Fallback Kafka topic used when no CLI argument is supplied. */
    private static final String DEFAULT_TOPIC = "topic";
    /** Fallback Kafka consumer address; presumably the bootstrap-server address
     *  consumed by {@code FlinkDeltaUtil.getProperties} — confirm against that helper. */
    private static final String DEFAULT_CONSUMER = "kafka consumer ip";
    /** Fallback Delta table root path (no partition columns). */
    private static final String DEFAULT_LAKE_PATH = "lakePath";

    /**
     * Job entry point. Optional positional arguments override the built-in defaults:
     * <pre>
     *   args[0] = Kafka topic
     *   args[1] = Kafka consumer address (bootstrap servers)
     *   args[2] = Delta Lake table path
     * </pre>
     * Running with no arguments behaves exactly as before (hard-coded defaults).
     *
     * @param args optional overrides: topic, consumer address, lake path
     * @throws Exception if job construction or execution fails (propagated from Flink)
     */
    public static void main(String[] args) throws Exception {

        String topic = args.length > 0 ? args[0] : DEFAULT_TOPIC;
        String consumer = args.length > 1 ? args[1] : DEFAULT_CONSUMER;
        String lakePathNoPartition = args.length > 2 ? args[2] : DEFAULT_LAKE_PATH;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // NOTE(review): the Delta sink commits on checkpoints, so this 5s interval
        // presumably also drives commit frequency — confirm against the connector docs.
        env.enableCheckpointing(5000);

        Properties properties = FlinkDeltaUtil.getProperties(topic, consumer);
        FlinkKafkaConsumer<String> source = new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), properties);

        env.addSource(source)
                .setParallelism(2)                       // read from Kafka with 2 parallel subtasks
                .map(FlinkDeltaUtil::kafkaJsonToRowData) // JSON string -> RowData
                .sinkTo(FlinkDeltaUtil.createDeltaSink(lakePathNoPartition, FlinkDeltaUtil.getKafkaRowType()))
                .setParallelism(1);                      // single writer into the Delta table
        env.execute("Flink-Read-Kafka-Json-To-Delta");
    }
}
