package com.sdses.flink.sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author jiwei
 * @description Flink SQL streaming job that reads temperature-device readings (JSON) from Kafka and writes them into a Doris table.
 * @date 2024/10/12 12:31
 */
public class FlinkSQL_Kafka2Doris {

    /** Job-wide operator parallelism. */
    private static final int PARALLELISM = 5;

    /** Checkpoint interval in ms; the Doris sink commits its stream loads on checkpoints. */
    private static final long CHECKPOINT_INTERVAL_MS = 10_000L;

    /** Entry-point class only — not meant to be instantiated. */
    private FlinkSQL_Kafka2Doris() {
    }

    /**
     * Builds and submits a streaming pipeline: JSON temperature/humidity
     * readings are read from Kafka topic {@code t_data_wdy_real} and inserted
     * into a Doris table with the same column schema.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(PARALLELISM);
        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Kafka source table: one row per JSON message.
        // NOTE(review): 'json.ignore-parse-errors' = 'true' silently drops
        // malformed messages — confirm this is acceptable for the feed.
        // NOTE(review): `extend_field`'s column COMMENT duplicates
        // `error_detail`'s ('报警内容') — looks like a copy-paste slip; confirm
        // the intended description before changing the DDL.
        tableEnv.executeSql("CREATE TABLE sourceTable (\n" +
                " `wdy_id` VARCHAR(64) NULL COMMENT '温度仪设备表ID', \n" +
                " `real_time` STRING NULL COMMENT '设备上传时间', \n" +
                " `save_time` STRING NULL COMMENT '入库保存时间', \n" +
                " `task_time` STRING NULL COMMENT '定时时间', \n" +
                " `id` BIGINT NULL COMMENT '主键', \n" +
                " `pd_id` VARCHAR(64) NULL COMMENT '原始数据表ID', \n" +
                " `mpno` VARCHAR(64) NULL COMMENT '设备上传计量点', \n" +
                " `equip_no` VARCHAR(64) NULL COMMENT '设备上传表号', \n" +
                " `equip_type_code` VARCHAR(64) NULL COMMENT '设备上传表类型', \n" +
                " `wdy_temp` DECIMAL(18, 2) NULL COMMENT '当前温度', \n" +
                " `wdy_hum` DECIMAL(18, 2) NULL COMMENT '当前湿度', \n" +
                " `alarm_status` VARCHAR(64) NULL COMMENT '报警代码', \n" +
                " `error_detail` VARCHAR(6000) NULL COMMENT '报警内容', \n" +
                " `extend_field` VARCHAR(6000) NULL COMMENT '报警内容', \n" +
                " `tz_wdy_temp` DECIMAL(18, 2) NULL COMMENT '调整后的温度', \n" +
                " `temperature_deviation` DECIMAL(18, 2) NULL COMMENT '温度偏差', \n" +
                " `status` INT NULL COMMENT '1:成功  2:表计报警  4:表计通讯失败 8:采集模块忙  16:EB90解析错误 32:通讯模块离线  64:服务器程序无响应  128:数据清洗 256:数据报警', \n" +
                " `abnormal_data` VARCHAR(2000) NULL COMMENT '异常具体数据' \n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 't_data_wdy_real',\n" +
                "  'properties.bootstrap.servers' = '172.21.32.233:39092,172.21.32.234:39092,172.21.32.235:39092',\n" +
                "  'properties.group.id' = 'test_2024101212',\n" +
                // Other valid modes: 'latest-offset', 'group-offsets',
                // 'timestamp', 'specific-offsets'.
                "  'scan.startup.mode' = 'earliest-offset',\n" +
                "  'json.ignore-parse-errors' = 'true',\n" +
                "  'format' = 'json'\n" +
                ")");

        // Doris sink table: LIKE ... (EXCLUDING ALL) copies the source's
        // columns but none of its connector options.
        // SECURITY: credentials are hard-coded in source — move them to job
        // configuration / a secrets store before this leaves a test cluster.
        // NOTE(review): with two-phase commit enabled, 'sink.label-prefix'
        // must be unique per job run or restarts can hit label-already-exists
        // errors — confirm how this prefix is rotated.
        tableEnv.executeSql("CREATE TABLE sinkTable WITH (\n" +
                "'connector' = 'doris',\n" +
                "'fenodes' = '172.21.32.230:38030,172.21.32.231:38030,172.21.32.232:38030',\n" +
                "'table.identifier' = 'db_datacube_doris_test.test_ods_sc01_rlwlyyxt_t_data_wdy_real_1',\n" +
                "'username' = 'datacube_doris_test',\n" +
                "'password' = 'Nx8Tz_Y8v3x',\n" +
                "'sink.properties.format' = 'json',\n" +
                "'sink.properties.read_json_by_line' = 'true',\n" +
                "'sink.enable-delete' = 'true',\n" +
                "'sink.label-prefix' = 'doris_label_2024101212'\n" +
                ") LIKE sourceTable (EXCLUDING ALL)");

        // Continuous INSERT: submits the streaming job; executeSql returns as
        // soon as the job is submitted to the cluster.
        tableEnv.executeSql("insert into sinkTable select * from sourceTable");
    }
}
