package com.wudl.flink.doris;

import com.wudl.flink.doris.sink.DorisSink;
import com.wudl.flink.doris.source.GenerateData;
import com.wudl.flink.doris.utils.DorisStreamLoad;
import com.wudl.flink.doris.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.util.Properties;

/**
 * Flink job that reads JSON records and writes them into a Doris table via Stream Load.
 *
 * @author wudl
 * @version 1.0
 * @since 2022-01-01 13:15
 */

public class DorisApp {

    /** Kafka broker list for the (currently disabled) Kafka source path. */
    private static final String BOOTSTRAP_SERVER = "192.168.1.161:6667";

    /** Kafka consumer group id. */
    private static final String GROUP_NAME = "flink_doris_group006";

    /** Kafka topic carrying the JSON records. */
    private static final String TOPIC_NAME = "wudltopicdoris01";

    /** Doris FE host:http_port used by Stream Load. */
    private static final String HOST_PORT = "192.168.1.161:8090";

    /** Target Doris database. */
    private static final String DB_NAME = "wudldb";

    /** Target Doris table. */
    private static final String TB_NAME = "wudl_doris01";

    /** Doris user for Stream Load. */
    private static final String USER_NAME = "root";

    // NOTE(review): empty password is fine for a demo; inject via config/env in production.
    private static final String PASSWORD = "";

    // Target column list. Order MUST line up 1:1 with JSON_FORMAT below
    // (both are alphabetical: address, city, id, name, phone).
    private static final String COLUMNS = "address,city,id,name,phone";

    // JSONPath expressions extracting each column from an incoming record such as:
    // {"address":"广东省","city":"海南","id":183,"name":"2022-01-03 00:41:37","phone":"15007840220"}
    private static final String JSON_FORMAT = "[\"$.address\",\"$.city\",\"$.id\",\"$.name\",\"$.phone\"]";

    /**
     * Builds and runs the pipeline: source -> debug print map -> Doris sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 10s and retain externalized checkpoints on cancellation
        // so the job can be restored after a manual stop.
        env.enableCheckpointing(10000);
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // Alternative Kafka source, kept ready for switching; NOT wired into the
        // pipeline below — swap `env.addSource(new GenerateData())` for
        // `env.addSource(kafkaConsumer)` to read from Kafka instead.
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVER);
        props.put("group.id", GROUP_NAME);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>(TOPIC_NAME, new SimpleStringSchema(), props);

        // Demo source producing synthetic JSON records.
        DataStreamSource<String> source = env.addSource(new GenerateData());

        DorisStreamLoad dorisStreamLoad =
                new DorisStreamLoad(HOST_PORT, DB_NAME, TB_NAME, USER_NAME, PASSWORD);

        // FIX: the original attached the sink to the raw source and left the debug
        // map() as a dangling branch (its result was discarded). Chain the identity
        // debug-print map into the single sink path instead.
        source.map(new MapFunction<String, String>() {
                    @Override
                    public String map(String record) throws Exception {
                        System.out.println(record);
                        return record;
                    }
                })
                .addSink(new DorisSink(dorisStreamLoad, COLUMNS, JSON_FORMAT));

        // NOTE(review): job name mentions Kafka but the active source is GenerateData;
        // name kept as-is to avoid changing anything operators may match on.
        env.execute("flink kafka to doris");
    }
}
