package com.lxj.app.dws;

import com.lxj.utils.ClickHouseUtil;
import com.lxj.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * DWS层商品维度聚合应用 - 使用JDBC连接ClickHouse
 */
public class DwsProductStats {

    /**
     * Entry point: builds a pure Flink-SQL pipeline that
     * <ol>
     *   <li>reads page-view logs and order details from Kafka,</li>
     *   <li>joins and aggregates them into 10-second tumbling windows per SKU,</li>
     *   <li>writes the resulting product stats to ClickHouse via JDBC.</li>
     * </ol>
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {

        // TODO 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Expire idle state after 1 hour so join/distinct-count state does not grow unbounded.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofHours(1));

        // TODO 2. Kafka source table: page-view logs.
        // `rt` is the event-time attribute derived from `ts` (epoch millis), with a 5s
        // out-of-orderness bound on the watermark.
        String pageLogDDL = "CREATE TABLE dwd_traffic_page_log (\n" +
                "  `common` MAP<STRING, STRING>,\n" +
                "  `page` MAP<STRING, STRING>,\n" +
                "  `ts` BIGINT,\n" +
                "  `rt` AS TO_TIMESTAMP_LTZ(ts, 3),\n" +
                "  WATERMARK FOR rt AS rt - INTERVAL '5' SECOND\n" +
                ") " + MyKafkaUtil.getKafkaDDL("dwd_traffic_page_log", "dws_product_stats");

        tableEnv.executeSql(pageLogDDL);

        // TODO 3. Kafka source table: order details (same watermark strategy as above).
        String orderDetailDDL = "CREATE TABLE dwd_trade_order_detail (\n" +
                "  `id` STRING,\n" +
                "  `order_id` STRING,\n" +
                "  `user_id` STRING,\n" +
                "  `sku_id` STRING,\n" +
                "  `sku_name` STRING,\n" +
                "  `sku_num` BIGINT,\n" +
                "  `order_price` DECIMAL(16, 2),\n" +
                "  `create_time` STRING,\n" +
                "  `source_id` STRING,\n" +
                "  `split_total_amount` DECIMAL(16, 2),\n" +
                "  `split_activity_amount` DECIMAL(16, 2),\n" +
                "  `split_coupon_amount` DECIMAL(16, 2),\n" +
                "  `ts` BIGINT,\n" +
                "  `rt` AS TO_TIMESTAMP_LTZ(ts, 3),\n" +
                "  WATERMARK FOR rt AS rt - INTERVAL '5' SECOND\n" +
                ") " + MyKafkaUtil.getKafkaDDL("dwd_trade_order_detail", "dws_product_stats");

        tableEnv.executeSql(orderDetailDDL);

        // TODO 4. Temporary view: per-SKU stats over 10-second tumbling windows.
        // Page views with a sku_id are interval-joined (±10 minutes) against order details
        // on sku_id; LEFT JOIN keeps SKUs that were viewed but never ordered.
        // NOTE(review): COUNT(DISTINCT) per window plus the ±10-minute join range can hold
        // sizeable state; the 1h idle-state TTL above bounds it.
        String productStatsViewSQL = "CREATE TEMPORARY VIEW product_stats_view AS\n" +
                "SELECT\n" +
                "  DATE_FORMAT(TUMBLE_START(p.rt, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') AS stt,\n" +
                "  DATE_FORMAT(TUMBLE_END(p.rt, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') AS edt,\n" +
                "  p.sku_id,\n" +
                "  MAX(od.sku_name) AS sku_name,\n" +
                "  COUNT(DISTINCT p.mid) AS uv,\n" +
                "  COUNT(DISTINCT od.user_id) AS pay_user_count,\n" +
                "  CASE \n" +
                "    WHEN COUNT(DISTINCT p.mid) = 0 THEN 0.0\n" +
                "    ELSE ROUND(COUNT(DISTINCT od.user_id) * 100.0 / COUNT(DISTINCT p.mid), 2)\n" +
                "  END AS pay_convert_rate,\n" +
                "  COUNT(DISTINCT od.order_id) AS order_num,\n" +
                "  COALESCE(SUM(od.split_total_amount), 0) AS gmv,\n" +
                "  UNIX_TIMESTAMP() * 1000 AS ts\n" +
                "FROM (\n" +
                "  SELECT\n" +
                "    common['mid'] AS mid,\n" +
                "    page['sku_id'] AS sku_id,\n" +
                "    rt\n" +
                "  FROM dwd_traffic_page_log\n" +
                "  WHERE page['sku_id'] IS NOT NULL\n" +
                ") p\n" +
                "LEFT JOIN dwd_trade_order_detail od ON p.sku_id = od.sku_id\n" +
                "  AND od.rt BETWEEN p.rt - INTERVAL '10' MINUTE AND p.rt + INTERVAL '10' MINUTE\n" +
                "GROUP BY TUMBLE(p.rt, INTERVAL '10' SECOND), p.sku_id";

        tableEnv.executeSql(productStatsViewSQL);

        // TODO 5. ClickHouse sink table (JDBC connector).
        String clickHouseSinkDDL = "CREATE TABLE product_stats (\n" +
                "  stt STRING,\n" +
                "  edt STRING,\n" +
                "  sku_id STRING,\n" +
                "  sku_name STRING,\n" +
                "  uv BIGINT,\n" +
                "  pay_user_count BIGINT,\n" +
                "  pay_convert_rate DECIMAL(10,2),\n" +
                "  order_num BIGINT,\n" +
                "  gmv DECIMAL(16,2),\n" +
                "  ts BIGINT\n" +
                ") " + ClickHouseUtil.getClickHouseSinkDDL("product_stats");

        tableEnv.executeSql(clickHouseSinkDDL);

        // TODO 6. Run the aggregation and write to ClickHouse.
        String insertSQL = "INSERT INTO product_stats\n" +
                "SELECT stt, edt, sku_id, sku_name, uv, pay_user_count, pay_convert_rate, order_num, gmv, ts\n" +
                "FROM product_stats_view";

        // executeSql(INSERT ...) submits the Flink job by itself; await() keeps the client
        // attached until the (unbounded) streaming job terminates.
        // Do NOT call env.execute() afterwards: no DataStream operators were defined in
        // this pipeline, so it would fail with
        // "No operators defined in streaming topology. Cannot execute."
        tableEnv.executeSql(insertSQL).await();
    }
}