package net.bwie.dt.job;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class DorisToKafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        Configuration configuration = tableEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone","Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism","1");
        configuration.setString("table.exec.state.ttl","5 s");
        configuration.setString("execution.checkpointing.interval","30 s");
        // Doris 表
        tableEnv.executeSql(
                "CREATE TABLE dws_order_detail (" +
                        "order_id INT," +
                        "user_id INT," +
                        "produce_id INT," +
                        "quantity INT," +
                        "price DECIMAL(10, 2)," +
                        "status VARCHAR(255)," +
                        "pay_amount DECIMAL(10, 2)," +
                        "pay_time TIMESTAMP(3)," +
                        "number INT," +
                        "action_type VARCHAR(255)," +
                        "page_type VARCHAR(255)," +
                        "event_type VARCHAR(255)," +
                        "produce_name VARCHAR(128)" +
                        ") WITH (" +
                        "'connector' = 'jdbc'," +
                        "'url' = 'jdbc:mysql://node101:9030/dt_realtime_ranking?useUnicode=true&characterEncoding=UTF-8'," +
                        "'table-name' = 'dws_order_detail'," +
                        "'username' = 'root'," +
                        "'password' = '123456'" +
                        ")"
        );

        // Kafka Sink
        tableEnv.executeSql(
                "CREATE TABLE kafka_sink (" +
                        "order_id INT," +
                        "user_id INT," +
                        "produce_id INT," +
                        "quantity INT," +
                        "price DECIMAL(10, 2)," +
                        "status VARCHAR(255)," +
                        "pay_amount DECIMAL(10, 2)," +
                        "pay_time TIMESTAMP(3)," +
                        "number INT," +
                        "action_type VARCHAR(255)," +
                        "page_type VARCHAR(255)," +
                        "event_type VARCHAR(255)," +
                        "produce_name VARCHAR(128)" +
                        ") WITH (" +
                        "'connector' = 'kafka'," +
                        "'topic' = 'doris_sync_topic'," +
                        "'properties.bootstrap.servers' = 'node101:9092'," +
                        "'format' = 'json'," +
                        "'scan.startup.mode' = 'earliest-offset'" +
                        ")"
        );

        // 数据同步
        tableEnv.executeSql(
                "INSERT INTO kafka_sink SELECT * FROM dws_order_detail"
        ).await();
    }
}
