package net.bwie.jtp.dws.job;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

/**
 * DWS-layer Flink job: aggregates DWD order-detail events from Kafka into
 * per-province order metrics over 1-minute tumbling event-time windows,
 * enriches each row with province/region dimension data looked up from HBase,
 * and sinks the resulting report into Doris.
 */
public class JtpOrderProvinceAggregateDwsJob {
    public static void main(String[] args) {
        // Build the streaming TableEnvironment with time-zone/parallelism/TTL config.
        TableEnvironment tabEnv = getTableEnv();
        // Register the input table mapped to the DWD order-detail Kafka topic.
        createInputTable(tabEnv);
        // Windowed aggregation plus dimension lookup joins; returns the report table.
        Table reportTable = handle(tabEnv);
        // Register the output table mapped to the Doris report table.
        createOutputTable(tabEnv);
        // Persist the aggregated report rows into Doris.
        saveToDoris(tabEnv, reportTable);
    }

    /**
     * Inserts the report rows into the Doris sink table.
     *
     * <p>NOTE: Flink's {@code SUBSTRING} is 1-based. The original code used a
     * start index of 0, which silently drops the last character of each
     * truncated value (18 instead of 19 chars for timestamps, 9 instead of 10
     * for the date), producing corrupted {@code start_time}/{@code end_time}/
     * {@code cur_date} values in Doris. Fixed to start at position 1.
     *
     * @param tabEnv      the table environment the sink table is registered in
     * @param reportTable the aggregated + dimension-enriched report rows
     */
    private static void saveToDoris(TableEnvironment tabEnv, Table reportTable) {
        // Expose the Table object to SQL under a temporary view name.
        tabEnv.createTemporaryView("report_table", reportTable);
        tabEnv.executeSql(
                "insert into dws_trade_province_order_report_doris_sink\n" +
                        "select\n" +
                        // 1-based substring: first 19 chars = 'yyyy-MM-dd HH:mm:ss'
                        "  SUBSTRING(CAST(start_time as string),1,19) as start_time,\n" +
                        "  SUBSTRING(CAST(end_time as string),1,19) as end_time,\n" +
                        // first 10 chars = 'yyyy-MM-dd' (the report date)
                        "  SUBSTRING(CAST(start_time as string),1,10) as cur_date,\n" +
                        "  province_id,province_name,region_id,region_name,\n" +
                        "  order_count,order_amount,activity_amount,coupon_amount\n" +
                        "from\n" +
                        "    report_table"
        );
    }

    /**
     * Registers the Doris sink table
     * {@code dws_trade_province_order_report_doris_sink}, mapped to the
     * physical Doris table {@code jtp_mall_report.dws_trade_province_order_report}.
     *
     * @param tabEnv the table environment to register the sink in
     */
    private static void createOutputTable(TableEnvironment tabEnv) {
       tabEnv.executeSql(
               " create table dws_trade_province_order_report_doris_sink(\n" +
                       "     start_time string,\n" +
                       "     end_time string,\n" +
                       "     cur_date string,\n" +
                       "      province_id string,\n" +
                       "      province_name string,\n" +
                       "      region_id string,\n" +
                       "      region_name string,\n" +
                       "      order_count bigint,\n" +
                       "      order_amount decimal(16,2),\n" +
                       "      activity_amount decimal(16,2),\n" +
                       "      coupon_amount decimal(16,2)\n" +
                       " )with(\n" +
                       "     'connector' = 'doris',\n" +
                       // Doris FE node for metadata/HTTP access.
                       "     'fenodes' = 'node102:8030',\n" +
                       "     'table.identifier' = 'jtp_mall_report.dws_trade_province_order_report',\n" +
                       "     'username' = 'root',\n" +
                       "     'password' = '123456',\n" +
                       // Micro-batch flush: every 10s or every 1000 rows, 3 retries.
                       "      'sink.batch.interval'= '10s',\n" +
                       "     'sink.max-retries' = '3',\n" +
                       "     'sink.batch.size' = '1000'\n" +
                       " )"
       );
    }

    /**
     * Core processing: 1-minute tumbling-window aggregation per province,
     * followed by lookup joins against the province and region HBase
     * dimension tables.
     *
     * @param tabEnv the table environment with the Kafka source already registered
     * @return the enriched report table (one row per window x province)
     */
    private static Table handle(TableEnvironment tabEnv) {
        // Step 1: tumbling window on event time, group by province, aggregate metrics.
        Table resultTable = tabEnv.sqlQuery(
                "SELECT\n" +
                    // Human-readable window bounds.
                    "    DATE_FORMAT(window_start, 'yyyy-MM-dd HH:mm:ss') AS start_time\n" +
                    "    , DATE_FORMAT(window_end, 'yyyy-MM-dd HH:mm:ss') AS end_time\n" +
                    "    , province_id\n" +
                    // Distinct orders within the window (one order spans many detail rows).
                    "    , count(DISTINCT order_id) AS order_count\n" +
                    // Monetary sums cast to DECIMAL for precision; NULL discounts count as 0.
                    "    , sum(cast(split_total_amount AS DECIMAL(10, 2))) AS order_amount\n" +
                    "    , sum(cast(if(split_activity_amount IS NULL, '0', split_activity_amount) AS DECIMAL(10, 2)))  AS activity_amount\n" +
                    "    , sum(cast(if(split_coupon_amount IS NULL, '0', split_coupon_amount) AS DECIMAL(10, 2)))  AS coupon_amount\n" +
                    // Processing time attribute, needed later for the lookup joins.
                    "    , PROCTIME() AS proc_time\n" +
                    "FROM TABLE (\n" +
                    // 1-minute tumbling windows keyed on the event-time column create_time.
                    "    TUMBLE(TABLE dwd_order_detail_kafka_source, DESCRIPTOR(create_time), INTERVAL '1' MINUTES)\n" +
                    ")\n" +
                    // Drop records missing a user id or source-type name.
                    "WHERE user_id IS NOT NULL AND source_type_name IS NOT NULL\n" +
                    "GROUP BY window_start, window_end, province_id"
        );
        tabEnv.createTemporaryView("result_table", resultTable);

        // Step 2: province dimension table backed by HBase (dim_base_province).
        tabEnv.executeSql(
                "CREATE TABLE dim_base_province_hbase_source (\n" +
                "    row_key STRING,\n" +
                // Single 'info' column family holding the province attributes.
                "    info ROW<area_code STRING, id STRING, iso_3166_2 STRING, iso_code STRING, name STRING, region_id STRING>,\n" +
                // HBase row key as primary key; NOT ENFORCED as required by Flink.
                "    PRIMARY KEY (row_key) NOT ENFORCED\n" +
                ") WITH (\n" +
                "    'connector' = 'hbase-2.2',\n" +
                "    'table-name' = 'dim_base_province',\n" +
                "    'zookeeper.quorum' = 'node101:2181,node102:2181,node103:2181',\n" +
                // Async lookups with a small TTL cache to reduce HBase round trips.
                "    'lookup.async' = 'true',\n" +
                "    'lookup.cache.max-rows' = '34',\n" +
                "    'lookup.cache.ttl' = '1 hour'\n" +
                ")"
        );

        // Region dimension table backed by HBase (dim_base_region).
        tabEnv.executeSql(
                "CREATE TABLE dim_base_region_hbase_source (\n" +
                "    row_key STRING,\n" +
                // Single 'info' column family holding the region attributes.
                "    info ROW<id STRING, region_name STRING>,\n" +
                "    PRIMARY KEY (row_key) NOT ENFORCED\n" +
                ") WITH (\n" +
                "    'connector' = 'hbase-2.2',\n" +
                "    'table-name' = 'dim_base_region',\n" +
                "    'zookeeper.quorum' = 'node101:2181,node102:2181,node103:2181',\n" +
                "    'lookup.async' = 'true',\n" +
                "    'lookup.cache.max-rows' = '34',\n" +
                "    'lookup.cache.ttl' = '1 hour'\n" +
                ")"
        );

        // Step 3: lookup joins (FOR SYSTEM_TIME AS OF proc_time) to attach
        // province name, region id, and region name to each aggregate row.
        Table reportTable = tabEnv.sqlQuery(
                "SELECT\n" +
                    "    t1.start_time,\n" +
                    "    t1.end_time,\n" +
                    "    t1.province_id,\n" +
                    "    t2.name AS province_name,\n" +
                    "    t2.region_id,\n" +
                    "    t3.region_name,\n" +
                    "    t1.order_count,\n" +
                    "    t1.order_amount,\n" +
                    "    t1.activity_amount,\n" +
                    "    t1.coupon_amount,\n" +
                    // Emission timestamp in epoch milliseconds.
                    "    UNIX_TIMESTAMP() * 1000 AS ts\n" +
                "FROM result_table t1\n" +
                    // province_id is the HBase row key of dim_base_province.
                    "    LEFT JOIN dim_base_province_hbase_source FOR SYSTEM_TIME AS OF t1.proc_time AS t2\n" +
                    "       ON t1.province_id = t2.row_key\n" +
                    // region_id from the province row keys into dim_base_region.
                    "    LEFT JOIN dim_base_region_hbase_source FOR SYSTEM_TIME AS OF t1.proc_time AS t3\n" +
                    "       ON t2.region_id = t3.row_key"
        );

        return reportTable;
    }

    /**
     * Registers the Kafka source table {@code dwd_order_detail_kafka_source}
     * over topic {@code dwd-order-detail}, with a processing-time column and a
     * zero-delay watermark on {@code create_time}.
     *
     * @param tabEnv the table environment to register the source in
     */
    private static void createInputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql(
                "CREATE TABLE dwd_order_detail_kafka_source(\n" +
                        "    `id` STRING,\n" +
                        "    `order_id` STRING,\n" +
                        "    `user_id` STRING,\n" +
                        "    `order_status` STRING,\n" +
                        "    `sku_id` STRING,\n" +
                        "    `sku_name` STRING,\n" +
                        "    `province_id` STRING,\n" +
                        "    `activity_id` STRING,\n" +
                        "    `activity_rule_id` STRING,\n" +
                        "    `coupon_id` STRING,\n" +
                        "    `date_id` STRING,\n" +
                        "    `create_time` TIMESTAMP(3),\n" +
                        "    `operate_date_id` STRING,\n" +
                        "    `operate_time` STRING,\n" +
                        "    `source_id` STRING,\n" +
                        "    `source_type` STRING,\n" +
                        "    `source_type_name` STRING,\n" +
                        "    `sku_num` STRING,\n" +
                        "    `split_original_amount` STRING,\n" +
                        "    `split_activity_amount` STRING,\n" +
                        "    `split_coupon_amount` STRING,\n" +
                        "    `split_total_amount` STRING,\n" +
                        "    `row_op_ts` STRING,\n" +
                        // Processing-time attribute used by the lookup joins downstream.
                        "    `proc_time` AS PROCTIME(),\n" +
                        // Event-time watermark with no allowed lateness.
                        "    WATERMARK FOR create_time AS create_time - INTERVAL '0' MINUTE\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dwd-order-detail',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                        "    'properties.group.id' = 'gid-dws-trade-sku-order',\n" +
                        // Replay the topic from the beginning on first start.
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json',\n" +
                        // Be lenient with malformed/partial JSON records.
                        "    'json.fail-on-missing-field' = 'false',\n" +
                        "    'json.ignore-parse-errors' = 'true'\n" +
                        ")"
        );
    }

    /**
     * Builds a streaming {@link TableEnvironment} configured with the
     * Asia/Shanghai session time zone, parallelism 1, and a 5-second state TTL.
     *
     * @return the configured table environment
     */
    private static TableEnvironment getTableEnv() {
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner().inStreamingMode().build();

        TableEnvironment tabEnv = TableEnvironment.create(settings);

        Configuration configuration = tabEnv.getConfig().getConfiguration();
        // Session time zone so DATE_FORMAT/window bounds render in local time.
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        // Single-parallelism execution for this job.
        configuration.setString("table.exec.resource.default-parallelism", "1");
        // Expire idle state after 5 seconds to bound state size.
        configuration.setString("table.exec.state.ttl", "5 s");

        return tabEnv;
    }
}
