package atguigu.com.edu.app.dwd.db;

import atguigu.com.edu.util.MyKafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// Trade domain - cart-add fact table (DWD layer).
// Consumes change-log records from the ODS `topic_db` Kafka topic, keeps the
// cart_info rows that represent an add-to-cart action, and writes them to the
// `dwd_trade_cart_add` topic through the upsert-kafka connector (keyed by id).
public class DwdTradeCartAdd {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // NOTE(review): presumably chosen to match the Kafka partition count — confirm.
        env.setParallelism(4);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        /* Checkpointing — disabled for local debugging; re-enable for production.
        env.enableCheckpointing(60000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(6000L);
        // RETAIN so the externalized checkpoint survives a manual cancel and the
        // job can be restored from it (DELETE_ON_CANCELLATION would discard it).
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        // Timeout must not be shorter than the checkpoint interval; the original
        // 6000L (6s) would time out checkpoints taken at a 60s interval.
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        env.setStateBackend(new HashMapStateBackend());
        // NOTE(review): port 8088 is the YARN ResourceManager web UI port, not an
        // HDFS NameNode RPC port (usually 8020 or 9000) — verify before enabling.
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop202:8088/edu/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        */

        // Map the ODS change-log Kafka topic to a dynamic table (topic_db).
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_trade_cart_add_group"));

        // Keep cart_info rows that represent an add-to-cart event: plain inserts,
        // plus updates where the row is not logically deleted (deleted = 0).
        Table cartAddTable = tableEnv.sqlQuery("select\n" +
                "    `data`['id'] id,\n" +
                "    `data`['user_id'] user_id,\n" +
                "    `data`['course_id'] course_id,\n" +
                "date_format(data['create_time'],'yyyy-MM-dd') date_id,\n" +
                "data['create_time'] create_time,\n" +
                "ts od_ts\n" +
                "from topic_db\n" +
                "where `table`='cart_info'\n" +
                "and (`type`='insert' or\n" +
                "   (`type`='update' and  CAST( `data`['deleted'] as int) =0 )\n" +
                ")");

        tableEnv.createTemporaryView("dwd_tr", cartAddTable);

        // BUGFIX: the former debug statement
        //     tableEnv.executeSql("select * from dwd_tr").print();
        // blocks forever on an unbounded streaming query (TableResult.print()
        // keeps consuming the continuous result), so the INSERT below was never
        // submitted and nothing was ever written to the sink.

        // Sink table over upsert-kafka, keyed by cart id so updates for the same
        // row collapse into the latest value per key.
        tableEnv.executeSql("CREATE TABLE dwd_trade_cart_add(\n" +
                "id string,\n" +
                "user_id string,\n" +
                "course_id string,\n" +
                " date_id string,\n" +
                " create_time string,\n" +
                "ts string,\n" +
                "PRIMARY KEY (id) NOT ENFORCED\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_cart_add"));

        // executeSql submits the continuous INSERT job itself; env.execute() is
        // not needed because no DataStream transformations were registered.
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from dwd_tr");
    }
}
