package org.apache.flink.doris;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

/**
 * This example demonstrates how to stream Kafka data with Flink
 * and use the Doris stream-load method to write the data into the specified Doris table.
 * <p>
 * The Kafka data format is a JSON array, for example: [{"id":1,"name":"root"}]
 *
 * @author SuperWein
 */
public class FlinkKafka2Doris {

    /** Kafka bootstrap servers, comma-separated host:port list. */
    private static final String BOOTSTRAP_SERVER = "10.172.32.19:9098,10.172.32.20:9098,10.172.32.21:9098,10.172.32.23:9098";
    /** Kafka consumer group id. */
    private static final String GROUP_NAME = "flink_doris_group";
    /** Kafka topic to consume from. */
    private static final String TOPIC_NAME = "flink_doris";
    /** Doris FE host:port used by the stream-load client. */
    private static final String HOST_PORT = "10.172.49.243:8030";
    /** Target Doris database name. */
    private static final String DB_NAME = "ydads";
    /** Target Doris table name. */
    private static final String TB_NAME = "order";
    /** Doris user name. */
    private static final String USER_NAME = "root";
    // NOTE(review): credentials should be externalized (args/env/config), not hardcoded in source.
    /** Doris password. */
    private static final String PASSWORD = "123456";
    /** Comma-separated column list passed to the Doris sink. */
    private static final String COLUMNS = "id,name,age,price,sale";
    /** JSONPath expressions mapping incoming JSON fields to the columns above. */
    private static final String JSON_FORMAT = "[\"$.id\",\"$.name\",\"$.age\",\"$.price\",\"$.sale\"]";
    /** Checkpoint interval in milliseconds. */
    private static final long CHECKPOINT_INTERVAL_MS = 10_000L;

    /**
     * Builds and executes the streaming job: Kafka source -> Doris stream-load sink.
     *
     * @param args unused
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", BOOTSTRAP_SERVER);
        props.setProperty("group.id", GROUP_NAME);
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Start from the earliest offset when the group has no committed offset yet.
        props.setProperty("auto.offset.reset", "earliest");
        props.setProperty("max.poll.records", "10000");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
        // Retain externalized checkpoints on cancellation so the job can be resumed manually.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>(TOPIC_NAME, new SimpleStringSchema(), props);

        DataStreamSource<String> source = env.addSource(kafkaConsumer);

        DorisStreamLoad dorisStreamLoad = new DorisStreamLoad(HOST_PORT, DB_NAME, TB_NAME, USER_NAME, PASSWORD);

        source.addSink(new DorisSink(dorisStreamLoad, COLUMNS, JSON_FORMAT));

        env.execute("flink kafka to doris");
    }

}
