package com.bw.data_summary;

import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// Vehicle entry/exit log processing (DWD layer)
public class dwd_vehicle_cars {
    /**
     * Flink SQL streaming job: reads vehicle entry/exit logs from MySQL via CDC,
     * enriches each record with the owner id from the Doris vehicle dimension table
     * (processing-time lookup join on the license plate), and writes the widened
     * records to the {@code dwd_cars_log} upsert-Kafka topic keyed by {@code id}.
     *
     * @param args unused
     * @throws Exception if job submission fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Stream execution environment
        StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();
        senv.setParallelism(1);

        // 2. Table environment bridged onto the stream environment
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(senv);

        // Vehicle log source table (MySQL CDC): sca.cars_Log
        tEnv.executeSql("CREATE TABLE bean_cars_log (\n" +
                " id STRING,\n" +
                " opTime STRING,\n" +
                " ctype INT,\n" +
                " carCode STRING,\n" +
                " cId INT,\n" +
                        // Processing-time attribute required for the temporal lookup join below
                " proc_time AS PROCTIME(), \n" +
                " PRIMARY KEY (id) NOT ENFORCED\n" +
                ") WITH (\n" +
                " 'connector' = 'mysql-cdc',\n" +
                " 'hostname' = 'node101',\n" +
                " 'port' = '3306',\n" +
                " 'username' = 'root',\n" +
                " 'password' = '123456',\n" +
                " 'database-name' = 'sca',\n" +
                " 'table-name' = 'cars_Log', \n" +
                // Chunk key for incremental snapshot splitting (STRING PK needs it explicitly)
                " 'scan.incremental.snapshot.chunk.key-column' = 'id'\n" +
                ")\n");

        // Vehicle dimension table (Doris lookup source with a 60s / 10k-row cache)
        tEnv.executeSql("CREATE TABLE dim_cars (\n" +
                " id INT,\n" +
                " owerId INT,\n" +
                " carCode STRING,\n" +
                " carColor STRING,\n" +
                " type TINYINT,\n" +
                " remark STRING \n" +
                ") WITH (\n" +
                " 'connector' = 'doris',\n" +
                " 'fenodes' = 'node102:8030',\n" +
                " 'jdbc-url' = 'jdbc:mysql://node102:9030',  \n" +
                " 'username' = 'root',\n" +
                " 'password' = '123456',\n" +
                " 'table.identifier' = 'sca_doris.cars',\n" +
                " 'lookup.cache.max-rows' = '10000',\n" +
                " 'lookup.cache.ttl' = '60s'\n" +
                ")\n");

        // Wide table: enrich each log record with the owner id via a
        // processing-time temporal (lookup) join on the license plate.
        // LEFT JOIN keeps log rows whose plate is missing from the dimension table.
        Table resultTable = tEnv.sqlQuery("SELECT " +
                "o1.id, " +
                "d1.owerId, " +
                "o1.opTime, " +
                "o1.cId, " +
                "o1.carCode, " +
                "o1.ctype " +
                "FROM bean_cars_log o1 " +
                "LEFT JOIN dim_cars FOR SYSTEM_TIME AS OF o1.proc_time AS d1 " +
                "ON o1.carCode = d1.carCode");

        tEnv.createTemporaryView("resultTable", resultTable);

        // Sink table (upsert-Kafka): changelog keyed by id; cId is widened INT -> BIGINT
        tEnv.executeSql("CREATE TABLE dwd_cars_log (\n" +
                " id STRING,\n" +
                // Owner / member id (from dim_cars; NULL when the plate is unknown)
                " owerId INT,\n" +
                " opTime STRING,\n" +
                // Community id
                " cId BIGINT,\n" +
                // License plate number
                " carCode STRING,\n" +
                " ctype INT,\n" +
                " PRIMARY KEY (id) NOT ENFORCED\n" +
                ") WITH (\n" +
                " 'connector' = 'upsert-kafka',\n" +
                " 'topic' = 'dwd_cars_log',\n" +
                " 'properties.bootstrap.servers' = 'node101:9092',\n" +
                " 'key.format' = 'json',\n" +
                " 'value.format' = 'json'\n" +
                ")\n");

        // Write the enriched records to the sink. Columns are listed explicitly
        // (instead of SELECT *) so the mapping does not silently break if either
        // schema is ever reordered; cId is implicitly cast INT -> BIGINT.
        tEnv.executeSql("INSERT INTO dwd_cars_log " +
                "SELECT id, owerId, opTime, cId, carCode, ctype FROM resultTable");
    }
}
