package com.lhd.app.dws;

import com.lhd.bean.ProductDetailStats;
import com.lhd.common.utils.MyClickHouseUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

import java.util.Iterator;
import java.util.concurrent.TimeUnit;

/**
 * DWS job: reads DWD-layer events from Kafka, computes global product-detail-page
 * statistics (PV, UV, cart-add users, favor-add users) via Flink SQL, prints a
 * snapshot table to stdout, and writes the snapshot to ClickHouse.
 */
public class DwsProductDetailPrintVersion {

    // Shared Kafka connection settings for all three DWD source tables.
    private static final String KAFKA_BOOTSTRAP_SERVERS = "hadoop102:9092";
    private static final String CONSUMER_GROUP_ID = "dws_print_version";

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register the three DWD-layer Kafka sources (table name == topic name).
        createKafkaSourceTable(tableEnv, "dwd_traffic_page_log",
                "    common MAP<STRING, STRING>, " +
                "    page MAP<STRING, STRING> ");
        createKafkaSourceTable(tableEnv, "dwd_trade_cart_add",
                "    user_id STRING ");
        createKafkaSourceTable(tableEnv, "dwd_interaction_favor_add",
                "    user_id STRING ");

        // Give the latest-offset consumers a moment to start receiving data.
        System.out.println("等待数据加载...");
        TimeUnit.SECONDS.sleep(5);

        System.out.println("开始统计...");

        // NOTE(review): these aggregates run over unbounded Kafka streams, so
        // collect() yields a changelog; reading only the first row returns the
        // count after the first update, not a final total. Confirm that this
        // "print the first available snapshot" semantics is intended.

        // 1. Page views (any page_log record that carries a page_id).
        Long pv = getLongResult(tableEnv.executeSql(
                "SELECT COUNT(*) as pv FROM dwd_traffic_page_log WHERE page['page_id'] IS NOT NULL"));

        // 2. Unique visitors, keyed on the device id (mid).
        Long uv = getLongResult(tableEnv.executeSql(
                "SELECT COUNT(DISTINCT common['mid']) as uv FROM dwd_traffic_page_log WHERE page['page_id'] IS NOT NULL"));

        // 3. Distinct users who added to cart.
        Long cartUsers = getLongResult(tableEnv.executeSql(
                "SELECT COUNT(DISTINCT user_id) as cart_users FROM dwd_trade_cart_add"));

        // 4. Distinct users who favorited.
        Long favorUsers = getLongResult(tableEnv.executeSql(
                "SELECT COUNT(DISTINCT user_id) as favor_users FROM dwd_interaction_favor_add"));

        // 5. Wall-clock timestamp for the snapshot.
        // BUG FIX: "current_time" is a reserved keyword (CURRENT_TIME) in
        // Flink/Calcite SQL and cannot be used as an unquoted column alias; the
        // value is read positionally, so a non-reserved alias is equivalent.
        String currentTime = getStringResult(tableEnv.executeSql(
                "SELECT DATE_FORMAT(CURRENT_TIMESTAMP, 'yyyy-MM-dd HH:mm:ss') as stat_time"));

        printResult(pv, uv, cartUsers, favorUsers, currentTime);

        // Wrap the snapshot in a one-element stream and sink it to ClickHouse.
        ProductDetailStats stats = new ProductDetailStats(
                "商品详情页全局统计",
                pv,
                uv,
                cartUsers,
                favorUsers,
                currentTime
        );

        DataStream<ProductDetailStats> statsStream = env.fromElements(stats);
        String sql = "INSERT INTO product_detail_stats (stat_type, total_pv, total_uv, cart_users, favor_users, stat_time) VALUES (?, ?, ?, ?, ?, ?)";
        statsStream.addSink(MyClickHouseUtil.getSinkFunction(sql));

        env.execute("DWS Product Detail Stats");
    }

    /**
     * Registers a JSON-format Kafka source table whose table name matches its topic.
     *
     * @param tableEnv table environment to register into
     * @param topic    Kafka topic, also used as the table name
     * @param schema   column definition list for the CREATE TABLE body
     */
    private static void createKafkaSourceTable(StreamTableEnvironment tableEnv, String topic, String schema) {
        tableEnv.executeSql("CREATE TABLE " + topic + " ( " +
                schema +
                ") WITH ( " +
                "    'connector' = 'kafka', " +
                "    'topic' = '" + topic + "', " +
                "    'properties.bootstrap.servers' = '" + KAFKA_BOOTSTRAP_SERVERS + "', " +
                "    'properties.group.id' = '" + CONSUMER_GROUP_ID + "', " +
                "    'scan.startup.mode' = 'latest-offset', " +
                "    'format' = 'json' " +
                ")");
    }

    /**
     * Returns column 0 of the first result row as a Long, or 0L when the result
     * is empty, the value is null, or collection fails (best-effort semantics).
     */
    private static Long getLongResult(TableResult result) {
        // BUG FIX: collect() returns a CloseableIterator backed by a running
        // job; close it to avoid leaking the client-side result fetcher.
        try (CloseableIterator<Row> it = result.collect()) {
            if (it.hasNext()) {
                Object value = it.next().getField(0);
                // COUNT(...) is BIGINT (Long); go through Number so any other
                // numeric type is still handled instead of throwing a CCE.
                return value != null ? ((Number) value).longValue() : 0L;
            }
        } catch (Exception e) {
            // Keep best-effort behavior but log the full exception, not just its message.
            System.err.println("获取结果失败: " + e);
        }
        return 0L;
    }

    /**
     * Returns column 0 of the first result row as a String, or the fallback
     * "未知时间" when the result is empty, null, or collection fails.
     */
    private static String getStringResult(TableResult result) {
        // Close the collect() iterator — same resource-leak fix as getLongResult.
        try (CloseableIterator<Row> it = result.collect()) {
            if (it.hasNext()) {
                Object value = it.next().getField(0);
                return value != null ? value.toString() : "未知时间";
            }
        } catch (Exception e) {
            System.err.println("获取时间失败: " + e);
        }
        return "未知时间";
    }

    /** Prints the snapshot as a fixed-width ASCII table (changelog-style "+I" row). */
    private static void printResult(Long pv, Long uv, Long cartUsers, Long favorUsers, String currentTime) {
        System.out.println("=========================================");
        System.out.println("       商品详情页全局统计数据");
        System.out.println("=========================================");
        System.out.println("+----+---------------------+--------+-----------+--------+-----------+---------------------+");
        System.out.println("| op |          统计类型    | 总浏览量 | 总访客数 | 加购人数 | 收藏人数 |          统计时间    |");
        System.out.println("+----+---------------------+--------+-----------+--------+-----------+---------------------+");

        System.out.printf("| +I | %-19s | %-6d | %-9d | %-6d | %-9d | %-19s |%n",
                "商品详情页全局统计", pv, uv, cartUsers, favorUsers, currentTime);

        System.out.println("+----+---------------------+--------+-----------+--------+-----------+---------------------+");
    }
}
