package com.atguigu.app.dwd.db;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * DWD-layer Flink job: builds the "trade pay detail success" fact stream.
 *
 * <p>Reads the order-detail stream ({@code dwd_course_order_detail}) and the raw
 * CDC stream ({@code topic_db}) from Kafka, filters payment-success events out of
 * {@code topic_db}, joins them with order details on {@code order_id}, and writes
 * the joined result to the Kafka topic {@code dwd_trade_pay_detail_suc}.
 */
public class DwdTradePayDetailSuc {
    public static void main(String[] args) {
        //TODO 1 Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2 State backend / checkpointing (intentionally disabled for local dev;
        // enable before deploying to production)
        /*
        env.enableCheckpointing(5*60*1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3*60*1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME","atxiaomeng");
        */

        // NOTE(review): this job uses an unbounded regular join (TODO 6) with no
        // idle-state retention configured, so join state grows without bound.
        // Consider tableEnv.getConfig().setIdleStateRetention(...) — confirm the
        // acceptable out-of-order window with the data team before enabling.

        //TODO 3 Read the order-detail stream from Kafka topic dwd_course_order_detail
        String topicName = "dwd_course_order_detail";
        String groupId = "dwd_trade_pay_detail_suc";
        tableEnv.executeSql("create table order_detail(\n" +
                "  id STRING,\n" +
                "  course_id STRING,\n" +
                "  course_name STRING,\n" +
                "  order_id STRING,\n" +
                "  user_id STRING,\n" +
                "  origin_amount STRING,\n" +
                "  coupon_reduce STRING,\n" +
                "  final_amount STRING,\n" +
                "  create_time STRING,\n" +
                "  update_time STRING\n" +
                ")" + KafkaUtil.getKafkaDDL(topicName, groupId));

        //TODO 4 Read the raw CDC stream (topic_db); `pt` is a processing-time attribute
        tableEnv.executeSql("create table  topic_db( \n" +
                "`database` STRING,\n" +
                "`table` STRING,\n" +
                "`type` STRING,\n" +
                "`ts` bigint,\n" +
                "`xid` STRING,\n" +
                "`commit` STRING,\n" +
                "`data` MAP<STRING,STRING>,\n" +
                "`pt` as  proctime()" +
                ")" + KafkaUtil.getKafkaDDL("topic_db", groupId));

        //TODO 5 Filter payment-success rows: payment_info updates whose
        // payment_status transitions to '1602' (= payment succeeded in this project's
        // dictionary — confirm against the dim dictionary table)
        Table paySuccTable = tableEnv.sqlQuery("select \n" +
                "  `data`['order_id'] order_id,\n" +
                "  `data`['id'] id,\n" +
                "  `data`['payment_type'] payment_type,\n" +
                "  `data`['callback_time'] callback_time,\n" +
                "  `ts`,\n" +
                "  `pt`\n" +
                "from topic_db\n" +
                "where `table`='payment_info'\n" +
                "and(`type`='update'and`data`['payment_status']='1602')");
        tableEnv.createTemporaryView("pay_succ", paySuccTable);

        //TODO 6 Join payment-success events with order details.
        // FIX: alias od.id/ps.id to od_id/ps_id — the original projected two columns
        // both named `id`, leaving the view's schema dependent on the planner's
        // auto-renaming and the final `insert ... select *` on fragile positional
        // matching. The aliases now match the sink schema by name.
        Table paySuccDetailTable = tableEnv.sqlQuery("select\n" +
                "  od.id od_id, \n" +
                "  od.course_id, \n" +
                "  od.course_name, \n" +
                "  od.order_id, \n" +
                "  od.user_id, \n" +
                "  od.origin_amount, \n" +
                "  od.coupon_reduce, \n" +
                "  od.final_amount, \n" +
                "  od.create_time, \n" +
                "  od.update_time, \n" +
                "  ps.id ps_id,\n" +
                "  ps.payment_type,\n" +
                "  ps.callback_time,\n" +
                "  ps.ts,\n" +
                "  ps.pt\n" +
                "from pay_succ ps\n" +
                "join  order_detail od\n" +
                "on ps.order_id=od.order_id");

        tableEnv.createTemporaryView("pay_succ_detail", paySuccDetailTable);

        //TODO 7 Write the joined stream to the Kafka sink topic
        String targetTopicName = "dwd_trade_pay_detail_suc";
        tableEnv.executeSql("create table pay_succ_result(\n" +
                "od_id STRING,\n" +
                "course_id STRING,\n" +
                "course_name STRING,\n" +
                "order_id STRING,\n" +
                "user_id STRING,\n" +
                "origin_amount STRING,\n" +
                "coupon_reduce STRING,\n" +
                "final_amount STRING,\n" +
                "create_time STRING,\n" +
                "update_time STRING,\n" +
                "ps_id STRING,\n" +
                "payment_type STRING,\n" +
                "callback_time STRING,\n" +
                "ts BIGINT,\n" +
                "pt TIMESTAMP_LTZ(3)\n" +
                ")" + KafkaUtil.getKafkaSinkDDL(targetTopicName));

        tableEnv.executeSql("insert into pay_succ_result select * from pay_succ_detail");
    }

}
