package com.atguigu.edu.app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import util.MyKafkaUtil;

/*
        Interaction domain: order-detail fact table
        (reads dwd_interaction_order_per from Kafka, keeps insert rows, writes dwd_interaction_order_detail)
 */
public class DwdInteractionOrderDetail {
    public static void main(String[] args) {
        // 1. Stream execution environment with a fixed parallelism of 4,
        //    plus the Table API environment layered on top of it.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(4);
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv);

        // 2. Checkpoint configuration intentionally omitted in this class.

        // 3. Dynamic source table mapped onto the Kafka topic dwd_interaction_order_per.
        //    Column set mirrors the upstream payload; `type`/`old` carry the CDC change info.
        String sourceDdl = "create table dwd_interaction_order_per(\n"
                + "id string,\n"
                + "course_id string,\n"
                + "course_name string,\n"
                + "order_id string,\n"
                + "user_id string,\n"
                + "origin_amount string,\n"
                + "final_amount string,\n"
                + "session_id string,\n"
                + "od_ts string,\n"
                + "order_status string,\n"
                + "out_trade_no string,\n"
                + "province_id string,\n"
                + "create_time string,\n"
                + "expire_time string,\n"
                + "update_time string,\n"
                + "`type` string,\n"
                + "`old` map<string,string>,\n"
                + "oi_ts string,\n"
                + "row_op_ts timestamp_ltz(3)\n"
                + ")"
                + MyKafkaUtil.getKafkaDDL("dwd_interaction_order_per", "dwd_interaction_order_detail");
        streamTableEnv.executeSql(sourceDdl);

        // 4. Keep only freshly inserted orders (`type` = 'insert');
        //    oi_ts is renamed to ts to match the sink schema below.
        String filterSql = "select\n"
                + "id\n"
                + ",course_id\n"
                + ",course_name\n"
                + ",order_id\n"
                + ",user_id\n"
                + ",origin_amount\n"
                + ",final_amount\n"
                + ",session_id\n"
                + ",od_ts od_ts\n"
                + ",order_status\n"
                + ",out_trade_no\n"
                + ",province_id\n"
                + ",create_time\n"
                + ",expire_time\n"
                + ",update_time\n"
                + ",type\n"
                + ",`old`\n"
                + ",oi_ts ts\n"
                + ",row_op_ts\n"
                + "from dwd_interaction_order_per  \n"
                + "where `type`='insert'";
        Table insertedOrders = streamTableEnv.sqlQuery(filterSql);
        streamTableEnv.createTemporaryView("filtered_table", insertedOrders);

        // 5. Upsert-kafka sink table keyed by order-detail id,
        //    mapped onto the Kafka topic dwd_interaction_order_detail.
        String sinkDdl = "create table dwd_interaction_order_detail(\n"
                + "id string,\n"
                + "course_id string,\n"
                + "course_name string,\n"
                + "order_id string,\n"
                + "user_id string,\n"
                + "origin_amount string,\n"
                + "final_amount string,\n"
                + "session_id string,\n"
                + "od_ts string,\n"
                + "order_status string,\n"
                + "out_trade_no string,\n"
                + "province_id string,\n"
                + "create_time string,\n"
                + "expire_time string,\n"
                + "update_time string,\n"
                + "`type` string,\n"
                + "`old` map<string,string>,\n"
                + "ts string,\n"
                + "row_op_ts timestamp_ltz(3),\n"
                + "primary key(id) not enforced\n"
                + ")"
                + MyKafkaUtil.getUpsertKafkaDDL("dwd_interaction_order_detail");
        streamTableEnv.executeSql(sinkDdl);

        // 6. Stream the filtered insert rows into the sink topic.
        streamTableEnv.executeSql("insert into dwd_interaction_order_detail select * from filtered_table");
    }
}
