package wangZhong.app.dwd.db;

import wangZhong.app.BaseSqlApp;
import wangZhong.common.Constant;
import wangZhong.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * DWD-layer pre-processing job for trade orders.
 *
 * <p>Reads CDC change events from the ODS Kafka topic ({@code edu_db}), splits out
 * {@code order_detail} (inserts only) and {@code order_info} (inserts + updates),
 * enriches with province names via a JDBC lookup join, and writes the joined result
 * to the upsert-kafka topic {@link Constant#TOPIC_DWD_TRADE_ORDER_PRE_PROCESS}.
 */
public class DwdTradeOrderPreProcess extends BaseSqlApp {
    public static void main(String[] args) {
        // Args: web-UI/port id 3005, parallelism 2, application name.
        new DwdTradeOrderPreProcess().init(
            3005,
            2,
            "DwdTradeOrderPreProcess"
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // Expire idle join state after 30 minutes so unmatched rows don't accumulate.
        // NOTE(review): the design notes at the bottom of this file argue a cancelled
        // order may arrive up to ~1 hour after its details — confirm whether the TTL
        // should be "1 hour" instead.
        tEnv.getConfig().getConfiguration().setString("table.exec.state.ttl","30 minute");

        // TODO 1. Register the ODS source table (edu_db).
        // Fix: the group/app id previously read "DwdTradeOrderProcess" (missing "Pre"),
        // inconsistent with the application name passed to init() above.
        readOdsDb(tEnv, "DwdTradeOrderPreProcess");

        // Dimension table base_province, used below in a lookup join (results cached).
        tEnv.executeSql("create table base_province(" +
                "id bigint, " +
                "name varchar " +
                ")with(" +
                " 'connector'='jdbc',  " +
                " 'url' = 'jdbc:mysql://hadoop162:3306/edu?useSSL=false'," +
                " 'table-name' = 'base_province', " +
                " 'username' = 'root',  " +
                " 'password' = 'aaaaaa'," +
                " 'lookup.cache.max-rows' = '10', " +
                " 'lookup.cache.ttl' = '1 hour' " +
                ")");

        // TODO 2. Filter order_detail rows (insert events only — details are append-only).
        Table orderDetail = tEnv.sqlQuery("" +
            "select\n" +
            "    data['id'] id,\n" +
            "    data['course_id'] course_id,\n" +
            "    data['course_name'] course_name,\n" +
            "    data['order_id'] order_id,\n" +
            "    data['user_id'] user_id,\n" +
            "    data['origin_amount'] origin_amount,\n" +
            "    data['coupon_reduce'] coupon_reduce,\n" +
            "    data['final_amount'] final_amount,\n" +
            "    data['create_time'] create_time,\n" +
            "    data['update_time'] update_time,\n" +
            "    ts od_ts,\n" +
            "    pt\n" +
            "from edu_db\n" +
            "where `database`='edu'\n" +
            "and `table`='order_detail'\n" +
            "and `type`='insert'");

        tEnv.createTemporaryView("order_detail", orderDetail);
//        tEnv.executeSql("select * from order_detail"); // debug

        // TODO 3. Filter order_info rows (inserts plus updates, e.g. order cancellation).
        Table orderInfo = tEnv.sqlQuery("" +
            "select\n" +
            "    data['id'] id,\n" +
            "    data['user_id'] user_id,\n" +
            "    data['order_status'] order_status,\n" +
            "    cast(data['province_id'] as bigint) province_id,\n" +
            "    data['update_time'] update_time,\n" +
            "    `type`,\n" +
            "    `old`,\n" +
            "    ts oi_ts,\n" +
            "    pt\n" +
            "from edu_db\n" +
            "where `database`='edu'\n" +
            "and `table`='order_info'\n" +
            "and (`type`='insert' or `type`='update')");

        tEnv.createTemporaryView("order_info", orderInfo);
//        tEnv.executeSql("select * from order_info"); // debug


        // TODO 4. Join order_info with order_detail (regular join) and enrich with
        // province names through a processing-time lookup join on base_province.
        Table result = tEnv.sqlQuery("" +
            "select\n" +
            "    oi.id,\n" +
            "    od.order_id,\n" +
            "    oi.user_id,\n" +
            "    oi.order_status,\n" +
            "    od.course_id,\n" +
            "    od.course_name,\n" +
            "    oi.province_id,\n" +
            "    pr.name province_name,\n" +
            "    od.origin_amount,\n" +
            "    od.coupon_reduce,\n" +
            "    od.final_amount,\n" +
            "    date_format(od.create_time, 'yyyy-MM-dd') date_id,\n" +
            "    od.create_time,\n" +
            "    date_format(oi.update_time, 'yyyy-MM-dd') update_time_id,\n" +
            "    oi.update_time,\n" +
            "    oi.`type`,\n" +
            "    oi.`old`,\n" +
            "    od.`od_ts`,\n" +
            "    oi.`oi_ts`,\n" +
            "    localtimestamp row_op_ts\n" + // system time when this row was produced
            "from order_info oi\n" +
            "join base_province for system_time as of oi.pt as pr on oi.province_id = pr.id\n" +
            "join order_detail od on oi.id = od.order_id");

        // TODO 5. Declare the sink table bound to the output Kafka topic.
        // upsert-kafka requires a primary key; `id` (the order_detail id) is used.
        tEnv.executeSql("" +
            "create table dwd_trade_order_pre_process(\n" +
            "    id string,\n" +
            "    order_id string,\n" +
            "    user_id string,\n" +
            "    order_status string,\n" +
            "    course_id string,\n" +
            "    course_name string,\n" +
            "    province_id bigint,\n" +
            "    province_name string,\n" +
            "    origin_amount string,\n" +
            "    coupon_reduce string,\n" +
            "    final_amount string,\n" +
            "    date_id string,\n" +
            "    create_time string,\n" +
            "    update_time_id string,\n" +
            "    update_time string,\n" +
            "    `type` string,\n" +
            "    `old` map<string, string>,\n" +
            "    od_ts bigint,\n" +
            "    oi_ts bigint,\n" +
            "    row_op_ts timestamp_ltz(3),\n" +
            "    primary key(id) not enforced \n" +
            ")" + SQLUtil.getUpsertKafkaSink(Constant.TOPIC_DWD_TRADE_ORDER_PRE_PROCESS)
        );

        // TODO 6. Write the join result to the sink table.
        result.executeInsert("dwd_trade_order_pre_process");
    }
}
/*
Design notes

order_detail join order_info — what is a reasonable state TTL?
    - Placing an order produces one order_info row and n order_detail rows almost
      simultaneously, so a TTL as short as ~10 s would suffice for that case.
    - Cancelling an order sends one *update* on order_info, with no corresponding
      order_detail change. The cancellation may arrive ~30 minutes (or more) after
      the original details, and still needs to join against them — so a TTL of
      ~1 hour is the safer choice.

Join strategy for the wider pipeline:
    order_detail  join       order_info
                  left join  activity
                  left join  coupon
                  lookup join dic

---
Writing to Kafka here must use the upsert-kafka connector (the join produces
retractions/updates, not a pure append stream).

Example of why consumers must deduplicate (left join emits multiple versions):
    id=1  final=100.1  coupon=null  <generation time t1>
    id=1  final=100.1  coupon=10    <generation time t2>

Because of the left joins, downstream consumers will see duplicate rows per key
and must deduplicate, keeping the most complete row — i.e. the one with the
latest generation time.
*/
















