package atguigu.com.edu.app.dwd.db;

import atguigu.com.edu.util.MyKafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// DWD layer, trade domain: cancelled-order detail table.
// Reads the pre-processed order-detail stream from Kafka, keeps only rows whose
// status was updated to '1003' (cancelled), and writes the result to an
// upsert-kafka sink keyed by the order-detail id.
public class DwdTradeOrderCancelDetail {

    public static void main(String[] args) {
        // Streaming environment + Table API bridge; parallelism matches the
        // source topic's partitioning used elsewhere in this pipeline.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Checkpointing is disabled for local development.
        // NOTE(review): before re-enabling, fix the values — the 5s checkpoint
        // timeout equals the 5s interval (checkpoints would time out under any
        // load), and the 6s min-pause is longer than the interval, which
        // stretches the effective checkpoint cycle to ~11s.
      /*  env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(6000L);
        env.getCheckpointConfig().setCheckpointTimeout(5000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop202:8020/edu/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");*/

        // TODO 1. Source: Kafka table over the pre-processed order topic produced
        // upstream; it carries both order-detail and order-info fields plus the
        // CDC metadata (`type`, `old`) used for change filtering below.
        // NOTE(review): column is named `session` here — presumably a session id;
        // confirm against the upstream producer's schema.
        tableEnv.executeSql("create table dwd_trade_order_pre_process(\n" +
                "id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "order_id string,\n" +
                "user_id string,\n" +
                "origin_amount string,\n" +
                "coupon_reduce string,\n" +
                "final_amount string,\n" +
                "date_id string,\n" +
                "create_time string,\n" +
                "od_ts string,\n" +
                "session string,\n" +
                "province_id string,\n" +
                "operate_date_id string,\n" +
                "operate_time string,\n" +
                "order_status string,\n" +
                "`type` string,\n" +
                "`old` map<string,string>,\n" +
                "oi_ts string,\n" +
                "row_op_ts timestamp_ltz(3)\n" +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_trade_order_pre_process","dwd_trade_order_detail_group"));

        // TODO 2. Keep only cancellation events: an 'update' changelog row whose
        // new order_status is '1003' (cancelled). row_op_ts is re-stamped with
        // the processing time of this job.
        Table filteredTable = tableEnv.sqlQuery("select \n" +
                "id,\n" +
                "course_id,\n" +
                "course_name,\n" +
                "order_id,\n" +
                "final_amount,\n" +
                "date_id,\n" +
                "create_time,\n" +
                "od_ts,\n" +
                "user_id,\n" +
                "province_id,\n" +
                " operate_date_id,\n" +
                "operate_time,\n" +
                "order_status,\n" +
                "oi_ts,\n" +
                "current_row_timestamp() row_op_ts\n" +
                "from dwd_trade_order_pre_process\n" +
                "where `type` = 'update'\n" +
                "and order_status = '1003'");
        tableEnv.createTemporaryView("filtered_table", filteredTable);

        // TODO 3. Sink: upsert-kafka table keyed by the order-detail id, so a
        // late re-emit for the same detail overwrites the earlier record.
        tableEnv.executeSql("create table dwd_trade_cancel_detail(\n" +
                "id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "order_id string,\n" +
                "final_amount string,\n" +
                "date_id string,\n" +
                "create_time string,\n" +
                "od_ts string,\n" +
                "user_id string,\n" +
                "province_id string,\n" +
                "operate_date_id string,\n" +
                "operate_time string,\n" +
                "order_status string,\n" +
                "oi_ts string,\n" +
                "row_op_ts timestamp_ltz(3),\n" +
                "primary key(id) not enforced\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_cancel_detail"));

        // TODO 4. Write out to Kafka. `select *` relies on filtered_table's
        // column order matching the sink schema exactly — keep the two lists in
        // sync when editing either.
        // (Removed a leftover debug `select * ... .print()`: in a streaming job
        // it blocks the client forever and adds a second consumer of the topic.)
        tableEnv.executeSql("insert into dwd_trade_cancel_detail select * from filtered_table");
    }

}
