package com.bw.gmall.realtime.app.dwd;

import com.bw.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class Zb2_Shop_Not_purchased_User_order {

    /**
     * Flink SQL streaming job.
     *
     * <p>Reads two Kafka-backed tables — new shop-visit events and per-shop/user order
     * amounts — left-joins them on (user, shop, date), and upserts the joined rows into
     * a Kafka sink table keyed by (us, shops, dt).
     *
     * <p>NOTE(review): the class name suggests "users who did NOT purchase", yet no
     * {@code total_amount IS NULL} filter is applied before the sink insert — confirm
     * whether that filtering is expected downstream.
     */
    public static void main(String[] args) {
        // Streaming environment with a single parallel task.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source 1: new-visitor events. An event-time column `rt` is derived from the
        // epoch-millis `ts` field, with a 2-second watermark delay.
        final String newVisitSourceDdl =
                "create table dwd_shop_new_visit( " +
                "    `common` map<string,string>, " +
                "    `page` map<string,string>, " +
                "    `ts` bigint, " +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                " ) " +
                MyKafkaUtil.getKafkaDDL("dwd_shop_new_visit", "dwd_shop_new_visitaaa");
        tableEnv.executeSql(newVisitSourceDdl);

        // Project the visit stream. The CAST turns the rowtime attribute into a plain
        // TIMESTAMP(3) so the later (non-windowed) join is permitted.
        final Table newVisits = tableEnv.sqlQuery(
                "select " +
                "common['uid'] us, " +
                "page['shop'] shops, " +
                "page['shop_new_visit'] snv, " +
                "CAST(rt AS TIMESTAMP(3)) as rt, " +
                "DATE_FORMAT(TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), 'yyyy-MM-dd') as dt " +
                "from dwd_shop_new_visit  ");
        tableEnv.createTemporaryView("dwd_shop_new_visit1", newVisits);

        // Source 2: per-shop/user order-amount summary with its own event time + watermark.
        final String orderAmountSourceDdl =
                "CREATE TABLE dws_trade_shop_user_order_amount (\n" +
                "  date_id STRING,\n" +
                "  user_id STRING,\n" +
                "  shop STRING,\n" +
                "  total_amount DOUBLE,\n" +
                "  ts TIMESTAMP(3),\n" +
                "    WATERMARK FOR ts AS ts - INTERVAL '2' SECOND " +
                ") " +
                MyKafkaUtil.getKafkaDDL("dws_trade_shop_user_order_amount", "dws_shop_new_visit_order");
        tableEnv.executeSql(orderAmountSourceDdl);

        // Strip the rowtime attribute from the order stream as well (same CAST trick).
        final Table orderAmounts = tableEnv.sqlQuery(
                "select " +
                "date_id, " +
                "user_id, " +
                "shop, " +
                "total_amount, " +
                "CAST(ts AS TIMESTAMP(3)) as rt " +
                "from dws_trade_shop_user_order_amount");
        tableEnv.createTemporaryView("dws_trade_shop_user_order_amount1", orderAmounts);

        // Left join: keep every new visit; total_amount is NULL when the visitor placed
        // no matching order that day. This is a regular (unbounded-state) join — consider
        // configuring state TTL in production.
        final Table visitsWithOrders = tableEnv.sqlQuery(
                "select  " +
                " us,shops,dt,snv,dwd_shop_new_visit1.rt,total_amount " +
                "FROM dwd_shop_new_visit1  " +
                "LEFT JOIN dws_trade_shop_user_order_amount1  " +
                "ON us = user_id AND shops = shop AND dt = date_id ");
        tableEnv.createTemporaryView("dwd_shop_new_visit2", visitsWithOrders);

        // Sink: upsert-kafka table keyed by (us, shops, dt) so later join updates
        // overwrite earlier rows for the same key.
        final String sinkDdl =
                "CREATE TABLE Zb2_Shop_Not_purchased_User_order (\n" +
                "  us STRING COMMENT '用户ID',\n" +
                "  shops STRING COMMENT '店铺',\n" +
                "  dt STRING COMMENT '日期',\n" +
                "  snv STRING COMMENT '新访客标识（根据您的查询推测）',\n" +
                "  rt TIMESTAMP(3) COMMENT '事件时间',\n" +
                "  total_amount DOUBLE COMMENT '订单金额',\n" +
                "  PRIMARY KEY (us, shops, dt) NOT ENFORCED\n" +
                ") " +
                MyKafkaUtil.getUpsertKafkaDDL("Zb2_Shop_Not_purchased_User_order");
        tableEnv.executeSql(sinkDdl);

        // Submit the continuous insert; executeSql launches the job asynchronously.
        tableEnv.executeSql(
                "insert into Zb2_Shop_Not_purchased_User_order select * from dwd_shop_new_visit2");
    }
}
