package net.bwie.mall.dwd.order.job;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;


/**
 * @ClassName：JtpOrderDetailDwdJob
 * @Author: ZddddQ
 * @Date: 2025/6/3 21:45
 * @Description: DWD-layer Flink SQL job. Reads CDC-style change records from the
 *               `topic-mall` Kafka topic, filters them into per-table views
 *               (order_detail / order_info / order_detail_activity / order_detail_coupon),
 *               enriches source_type via an HBase dictionary lookup join, and writes
 *               the resulting order-detail wide table to the `dwd-order-detail`
 *               Kafka topic through an upsert-kafka sink.
 */
public class JtpOrderDetailDwdJob {

    public static void main(String[] args) {

        // 1. Table execution environment (streaming mode, Blink planner)
        TableEnvironment tabEnv = getTableEnv() ;

        // 2. Input table: mapped to the Kafka topic carrying CDC change records
        createInputTable(tabEnv);

        // 3. Processing: filter and join the change streams into the order-detail wide table
        Table resultTable = handle(tabEnv);

        // 4. Output table: mapped to the DWD Kafka topic (upsert-kafka)
        createOutputTable(tabEnv) ;

        // 5. Persist: INSERT the joined result into the sink table
        saveToKafka(tabEnv, resultTable) ;

    }

    /**
     * Registers {@code resultTable} as the temporary view {@code result_table} and
     * streams all 23 of its columns into the {@code dwd_order_detail_kafka_sink}
     * table created by {@link #createOutputTable(TableEnvironment)}.
     *
     * <p>The SELECT column list must stay in the exact order of the sink DDL,
     * since the INSERT matches columns positionally.
     *
     * @param tabEnv      shared table environment
     * @param resultTable joined wide table produced by {@link #handle(TableEnvironment)}
     */
    private static void saveToKafka(TableEnvironment tabEnv, Table resultTable) {

        tabEnv.createTemporaryView("result_table", resultTable);

        tabEnv.executeSql(
                "INSERT INTO dwd_order_detail_kafka_sink\n" +
                        "SELECT\n" +
                        "    id, order_id, user_id, order_status, sku_id, sku_name, province_id,\n" +
                        "       activity_id, activity_rule_id, coupon_id, date_id, create_time,\n" +
                        "       operate_date_id, operate_time, source_id, source_type, source_type_name,\n" +
                        "       sku_num, split_original_amount, split_activity_amount, split_coupon_amount,\n" +
                        "       split_total_amount, row_op_ts\n" +
                        "FROM result_table"
        );

    }

    // Creates the output (sink) table mapped to the `dwd-order-detail` Kafka topic.
    // upsert-kafka (rather than plain kafka) is required because the regular join in
    // handle() produces a changelog stream; the PRIMARY KEY (id) keys the upsert records.
    private static void createOutputTable(TableEnvironment tabEnv) {

        tabEnv.executeSql(
                "CREATE TABLE dwd_order_detail_kafka_sink (\n" +
                        "    id STRING,\n" +
                        "    order_id STRING,\n" +
                        "    user_id STRING,\n" +
                        "    order_status STRING,\n" +
                        "    sku_id STRING,\n" +
                        "    sku_name STRING,\n" +
                        "    province_id STRING,\n" +
                        "    activity_id STRING,\n" +
                        "    activity_rule_id STRING,\n" +
                        "    coupon_id STRING,\n" +
                        "    date_id STRING,\n" +
                        "    create_time STRING,\n" +
                        "    operate_date_id STRING,\n" +
                        "    operate_time STRING,\n" +
                        "    source_id STRING,\n" +
                        "    source_type STRING,\n" +
                        "    source_type_name STRING,\n" +
                        "    sku_num STRING,\n" +
                        "    split_original_amount STRING,\n" +
                        "    split_activity_amount STRING,\n" +
                        "    split_coupon_amount STRING,\n" +
                        "    split_total_amount STRING,\n" +
                        "    row_op_ts TIMESTAMP_LTZ(3),\n" +
                        "    PRIMARY KEY (id) NOT ENFORCED\n" +
                        ") WITH (\n" +
                        "    'connector' = 'upsert-kafka',\n" +
                        "    'topic' = 'dwd-order-detail',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                        "    'key.format' = 'json',\n" +
                        "    'value.format' = 'json'\n" +
                        ")"
        );

    }

    /**
     * Builds the order-detail wide table:
     * <ol>
     *   <li>Filters {@code topic_db_kafka_source} into four temporary views, one per
     *       upstream table (order_detail, order_info, order_detail_activity,
     *       order_detail_coupon), extracting fields from the {@code operator_data} map.</li>
     *   <li>Creates the HBase-backed dictionary dimension table
     *       {@code dim_base_dic_hbase_source}.</li>
     *   <li>Joins detail INNER JOIN info, LEFT JOINs activity/coupon, and performs a
     *       processing-time temporal (lookup) join against the HBase dictionary to
     *       resolve {@code source_type_name}.</li>
     * </ol>
     *
     * @param tabEnv shared table environment (the input view must already exist)
     * @return the joined wide table, column order matching the sink DDL
     */
    private static Table handle(TableEnvironment tabEnv) {

        // order_detail inserts; split_original_amount is derived as sku_num * order_price,
        // computed in DECIMAL(16,2) then cast back to STRING to match the sink schema.
        Table order_detail_table = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    operator_data['id'] AS id,\n" +
                        "    operator_data['order_id'] AS order_id,\n" +
                        "    operator_data['sku_id'] AS sku_id,\n" +
                        "    operator_data['sku_name'] AS sku_name,\n" +
                        "    operator_data['create_time'] AS create_time,\n" +
                        "    operator_data['source_id'] AS source_id,\n" +
                        "    operator_data['source_type'] AS source_type,\n" +
                        "    operator_data['sku_num'] AS sku_num,\n" +
                        "    cast(\n" +
                        "        cast(operator_data['sku_num'] AS DECIMAL(16, 2)) * cast(operator_data['order_price'] AS DECIMAL(16, 2)) AS STRING\n" +
                        "    ) AS split_original_amount,\n" +
                        "    operator_data['split_total_amount'] AS split_total_amount,\n" +
                        "    operator_data['split_activity_amount'] AS split_activity_amount,\n" +
                        "    operator_data['split_coupon_amount'] AS split_coupon_amount,\n" +
                        "    proc_time\n" +
                        "FROM topic_db_kafka_source\n" +
                        "WHERE `table_name` = 'order_detail' AND operator_type = 'insert'"
        );

        tabEnv.createTemporaryView("order_detail_table", order_detail_table);

        // order_info inserts AND updates (order_status changes arrive as updates).
        Table order_info_table = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    operator_data['id'] AS id,\n" +
                        "    operator_data['user_id'] AS user_id,\n" +
                        "    operator_data['province_id'] AS province_id,\n" +
                        "    operator_data['operate_time'] AS operate_time,\n" +
                        "    operator_data['order_status'] AS order_status\n" +
                        "FROM topic_db_kafka_source\n" +
                        "WHERE `table_name` = 'order_info'\n" +
                        "  AND (operator_type = 'insert' OR operator_type = 'update')"
        );

        tabEnv.createTemporaryView("order_info_table", order_info_table);

        // order_detail_activity inserts: activity participation per order-detail row.
        Table order_detail_activity_table = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    operator_data['order_detail_id'] AS order_detail_id,\n" +
                        "    operator_data['activity_id'] AS activity_id,\n" +
                        "    operator_data['activity_rule_id'] AS activity_rule_id\n" +
                        "FROM topic_db_kafka_source\n" +
                        "WHERE `table_name` = 'order_detail_activity' AND operator_type = 'insert'"
        );

        tabEnv.createTemporaryView("order_detail_activity_table", order_detail_activity_table);

        // order_detail_coupon inserts: coupon usage per order-detail row.
        Table order_detail_coupon_table = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    operator_data['order_detail_id'] AS order_detail_id,\n" +
                        "    operator_data['coupon_id'] AS coupon_id\n" +
                        "FROM topic_db_kafka_source\n" +
                        "WHERE `table_name` = 'order_detail_coupon' AND operator_type = 'insert'"
        );

        tabEnv.createTemporaryView("order_detail_coupon_table", order_detail_coupon_table);

        // HBase dictionary dimension (row_key = dic_code) used for the lookup join below.
        // NOTE(review): 'lookup.cache.max-rows' = '10' is very small for a dictionary
        // table — presumably a dev/test value; consider raising it for production.
        tabEnv.executeSql(
                "CREATE TABLE dim_base_dic_hbase_source (\n" +
                        "    row_key STRING,\n" +
                        "    info ROW<dic_code STRING, dic_name STRING, parent_code STRING>,\n" +
                        "    PRIMARY KEY (row_key) NOT ENFORCED\n" +
                        ") WITH (\n" +
                        "    'connector' = 'hbase-2.2',\n" +
                        "    'table-name' = 'dim_base_dic',\n" +
                        "    'zookeeper.quorum' = 'node101:2181,node102:2181,node103:2181',\n" +
                        "    'lookup.async' = 'true',\n" +
                        "    'lookup.cache.max-rows' = '10',\n" +
                        "    'lookup.cache.ttl' = '1 hour'\n" +
                        ")"
        );

        // Wide-table join. The FOR SYSTEM_TIME AS OF detail.proc_time clause makes the
        // HBase join a processing-time lookup join keyed on source_type = row_key.
        // row_op_ts records when this row was produced, for downstream de-duplication.
        Table joinTable  = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    detail.id,\n" +
                        "    detail.order_id,\n" +
                        "    info.user_id,\n" +
                        "    info.order_status,\n" +
                        "    detail.sku_id,\n" +
                        "    detail.sku_name,\n" +
                        "    info.province_id,\n" +
                        "    activity.activity_id,\n" +
                        "    activity.activity_rule_id,\n" +
                        "    coupon.coupon_id,\n" +
                        "    date_format(detail.create_time, 'yyyy-MM-dd') AS date_id,\n" +
                        "    detail.create_time,\n" +
                        "    date_format(info.operate_time, 'yyyy-MM-dd') AS operate_date_id,\n" +
                        "    info.operate_time,\n" +
                        "    detail.source_id,\n" +
                        "    detail.source_type,\n" +
                        "    dic.dic_name AS source_type_name,\n" +
                        "    detail.sku_num,\n" +
                        "    detail.split_original_amount,\n" +
                        "    detail.split_activity_amount,\n" +
                        "    detail.split_coupon_amount,\n" +
                        "    detail.split_total_amount,\n" +
                        "    current_row_timestamp() AS row_op_ts\n" +
                        "FROM order_detail_table detail\n" +
                        "    JOIN order_info_table info ON detail.order_id = info.id\n" +
                        "    LEFT JOIN order_detail_activity_table activity ON detail.id = activity.order_detail_id\n" +
                        "    LEFT JOIN order_detail_coupon_table coupon ON detail.id = coupon.order_detail_id\n" +
                        "    LEFT JOIN dim_base_dic_hbase_source FOR SYSTEM_TIME AS OF detail.proc_time AS dic\n" +
                        "    ON detail.source_type = dic.row_key"
        );

        return joinTable;

    }


    // Creates the input (source) table mapped to the `topic-mall` Kafka topic.
    // Each message is a JSON change record: operator_type (insert/update/...),
    // operator_data (column-name -> value map), db_name, table_name, ts.
    // proc_time is a computed processing-time attribute, required by the
    // FOR SYSTEM_TIME AS OF lookup join in handle().
    private static void createInputTable(TableEnvironment tabEnv) {

        // Malformed JSON is skipped rather than failing the job
        // (fail-on-missing-field=false, ignore-parse-errors=true).
        tabEnv.executeSql(
                "CREATE TABLE `topic_db_kafka_source`(\n" +
                        "    `operator_type` STRING,\n" +
                        "    `operator_data` MAP<STRING, STRING>,\n" +
                        "    `db_name` STRING,\n" +
                        "    `table_name` STRING,\n" +
                        "    `ts` BIGINT,\n" +
                        "    `proc_time` AS PROCTIME()\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'topic-mall',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                        "    'properties.group.id' = 'gid-dwd-order-detail',\n" +
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json',\n" +
                        "    'json.fail-on-missing-field' = 'false',\n" +
                        "    'json.ignore-parse-errors' = 'true'\n" +
                        ")"
        );

    }

    /**
     * Builds the streaming {@link TableEnvironment} used by the whole job.
     *
     * <p>NOTE(review): {@code useBlinkPlanner()} is deprecated from Flink 1.14 and
     * removed in 1.15 — fine for the Flink version this project presumably targets,
     * but will need removal on upgrade.
     *
     * @return configured table environment
     */
    private static TableEnvironment getTableEnv() {

        // 1. Environment settings: Blink planner, streaming mode
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings) ;

        // 2. Job-level configuration
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        // State TTL of 5s: the regular joins in handle() keep join state only this long.
        // NOTE(review): this assumes matching order_detail/order_info/activity/coupon
        // records arrive within 5 seconds of each other — confirm against the CDC
        // ingestion latency, otherwise join matches will be silently dropped.
        configuration.setString("table.exec.state.ttl", "5 s");

        // 3. Return the configured environment
        return tabEnv;

    }

}
