package app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import utils.DateFormatUtil;
import utils.MyKafkaUtil;

import java.time.Duration;
//数据流：web/app ->Mysql -> Maxwell -> Kafka(ODS) -> FlinkApp -> Kafka(DWD)
//程 序: Mock -> Mysql -> Maxwell -> Kafka(ZK) -> Dwd04_dwd_trade_pay_detail_suc -> Kafka(ZK)

/**
 * DWD-layer job: builds the "successful trade payment detail" fact stream.
 *
 * <p>Data flow: web/app -> MySQL -> Maxwell -> Kafka (ODS, topic_db)
 * -> this Flink app -> Kafka (DWD, dwd_trade_pay_detail_suc).
 *
 * <p>It joins successful payment records (payment_info inserts) with the
 * matching order rows (order_info status transition 1002 -> 1004) and writes
 * the widened result back to Kafka.
 */
public class Dwd04_dwd_trade_pay_detail_suc {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming and table environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // The payment/order join in step 6 is a regular (unbounded) join, so its
        // state would grow forever without a TTL. An order must be paid within
        // 15 minutes; keep state a bit longer (18 min + 5 s) to absorb latency.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(18 * 60 + 5));

        // Checkpointing / state-backend production settings (disabled for local runs):
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
//        ));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");


        // 2. Read topic_db from the Kafka ODS layer with Flink SQL.
        //    Event time (rt) is derived from the payment callback time.
        tableEnv.executeSql("create table topic_db_payment_info(\n" +
                "`database` string,\n" +
                "`table` string,\n" +
                "`type` string,\n" +
                "`ts` string,\n" +
                "`data` map<string,string>,\n" +
                "`old` map<string,string>,\n" +
                "rt as to_timestamp(`data`['callback_time']),\n" +
                "watermark for rt as rt - interval '2' second)"+MyKafkaUtil.getKafkaSourceConnOption("topic_db","pay_detail_suc"));

        // Debug sink: print the raw source rows (remove before production).
        Table table11 = tableEnv.sqlQuery("select * from topic_db_payment_info");
        tableEnv.toAppendStream(table11, Row.class).print("table11--->");


        // 3. Filter for newly inserted payment_info rows of the 'edu' database.
        Table paymentInfoTable = tableEnv.sqlQuery("select \n" +
                "`data`['id'] id, \n" +
                "`data`['out_trade_no'] out_trade_no, \n" +
                "`data`['order_id'] order_id, \n" +
                "`data`['alipay_trade_no'] alipay_trade_no, \n" +
                "`data`['total_amount'] total_amount, \n" +
                "`data`['trade_body'] trade_body, \n" +
                "`data`['payment_type'] payment_type, \n" +
                "`data`['payment_status'] payment_status, \n" +
                "`data`['create_time'] create_time, \n" +
                "`data`['callback_time'] callback_time,\n" +
                "rt\n" +
                "from topic_db_payment_info \n" +
                "where `database`='edu' \n" +
                "and `table`='payment_info' \n" +
                "and `type`='insert'" );
        tableEnv.createTemporaryView("payment_info",paymentInfoTable);
        tableEnv.toAppendStream(paymentInfoTable, Row.class).print("paymentInfoTable--->");

        // 4. Read topic_db again for order_info (rt differs: order create_time).
        //    NOTE(review): this reuses consumer group "pay_detail_suc" on the same
        //    topic as step 2 — group.id only affects offset committing in Flink,
        //    but confirm this matches the team's monitoring conventions.
        tableEnv.executeSql("create table topic_db_order_info(\n" +
                "`database` string,\n" +
                "`table` string,\n" +
                "`type` string,\n" +
                "`ts` string,\n" +
                "`data` map<string,string>,\n" +
                "`old` map<string,string>,\n" +
                "rt as to_timestamp(data['create_time']),\n" +
                "watermark for rt as rt - interval '2' second)"+MyKafkaUtil.getKafkaSourceConnOption("topic_db","pay_detail_suc"));


        // 5. Keep only order_info updates that represent a successful payment:
        //    order_status transitioned from 1002 (pending) to 1004 (paid).
        Table orderInfoTable = tableEnv.sqlQuery("select \n" +
                "`data`['id'] id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['course_id'] course_id,\n" +
                "`data`['course_name'] course_name,\n" +
                "`data`['origin_amount'] origin_amount,\n" +
                "`data`['coupon_reduce'] coupon_reduce,\n" +
                "`data`['final_amount'] final_amount,\n" +
                "`data`['order_status'] order_status,\n" +
                "`data`['out_trade_no'] out_trade_no,\n" +
                "`data`['trade_body'] trade_body,\n" +
                "`data`['session_id'] session_id,\n" +
                "`data`['province_id'] province_id,\n" +
                "`data`['create_time'] create_time,\n" +
                "`data`['expire_time'] expire_time,\n" +
                "`data`['update_time'] update_time,\n" +
                "rt\n" +
                "from topic_db_order_info \n" +
                "where `database`='edu' \n" +
                "and `table`='order_info' \n" +
                "and `type`='update' \n" +
                "and `data`['order_status']='1004'\n" +
                "and `old`['order_status']='1002'" );
        tableEnv.createTemporaryView("order_info",orderInfoTable);
        tableEnv.toAppendStream(orderInfoTable, Row.class).print("orderInfoTable--->");

        // 6. Join payments with orders. Payment must happen within 15 min of the
        //    order; allowing for network delay, the order row is expected between
        //    15 min before and 5 s after the payment. The interval predicates are
        //    kept disabled (regular join + idle-state TTL instead) — re-enable
        //    them to switch to a watermark-driven interval join.
        Table resultTable = tableEnv.sqlQuery("select \n" +
                "pi.id,\n" +
                "pi.out_trade_no,\n" +
                "pi.order_id,\n" +
                "pi.alipay_trade_no,\n" +
                "pi.total_amount,\n" +
                "pi.trade_body,\n" +
                "pi.payment_type,\n" +
                "pi.payment_status,\n" +
                "pi.create_time,\n" +
                "pi.callback_time,\n" +
                "oi.user_id,\n" +
                "oi.course_id,\n" +
                "oi.course_name,\n" +
                "oi.origin_amount,\n" +
                "oi.coupon_reduce,\n" +
                "oi.final_amount,\n" +
                "oi.session_id,\n" +
                "oi.order_status, \n" +
                "oi.province_id \n" +
                "from payment_info pi\n" +
                "join order_info oi\n" +
                "on pi.order_id=oi.id\n" );
//                "and oi.rt>=pi.rt - interval '15' minute\n" +
//                "and oi.rt<=pi.rt + interval '5' second" );
        tableEnv.createTemporaryView("result_table",resultTable);
        tableEnv.toAppendStream(resultTable, Row.class).print("resultTable--->");

        // 7. Declare the DWD sink table dwd_trade_pay_detail_suc on Kafka.
        tableEnv.executeSql("create table dwd_trade_pay_detail_suc(\n" +
                "id string,\n" +
                "out_trade_no string,\n" +
                "order_id string,\n" +
                "alipay_trade_no string,\n" +
                "total_amount string,\n" +
                "trade_body string,\n" +
                "payment_type string,\n" +
                "payment_status string,\n" +
                "create_time string,\n" +
                "callback_time string,\n" +
                "user_id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "origin_amount string,\n" +
                "coupon_reduce string,\n" +
                "final_amount string,\n" +
                "session_id string,\n" +
                "order_status string,\n" +
                "province_id string\n" +
                ")"+MyKafkaUtil.getKafkaSinkConnOption("dwd_trade_pay_detail_suc"));

        // 8. Submit the INSERT job that streams the joined result into Kafka.
        tableEnv.executeSql("insert into dwd_trade_pay_detail_suc select * from result_table");

        // Submit the DataStream job (carries the debug print sinks above).
        env.execute("Dwd04_dwd_trade_pay_detail_suc");
    }
}
