package com.zhu.app.dwd;

import com.zhu.utils.MySqlUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD trade-order pre-processing app: joins five tables into one wide
 * pre-process table and writes it to Kafka.
 *
 * Sources (all filtered out of the CDC `topic_db` stream):
 *   - order_detail          (main table)
 *   - order_info            (filtered to insert/update)
 *   - order_detail_activity (filtered to insert)
 *   - order_detail_coupon   (filtered to insert)
 *   - base_dic              (MySQL lookup/dimension table)
 *
 * The join produces a retract stream, so the sink is an upsert-kafka table;
 * retractions appear in Kafka as null-value (tombstone) records and must be
 * handled by a deserializer downstream.
 */
public class DWDTradeOrderPreProcessApp {
    public static void main(String[] args) throws Exception {

        // todo env
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        streamExecutionEnvironment.setParallelism(4);
        // Checkpointing config — disabled for local development, enable in production:
        /*
        streamExecutionEnvironment.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE); // exactly-once

        // state backend
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);  // store checkpoints on HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);  // checkpoint timeout
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);  // max concurrent checkpoints
        streamExecutionEnvironment.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L));  // restart strategy

         */

        StreamTableEnvironment streamTableEnvironment = StreamTableEnvironment.create(streamExecutionEnvironment);
        // TTL for state kept by the regular (non-temporal) joins below; rows
        // older than this are evicted, bounding state size for the 1:1 joins.
        Configuration configuration = streamTableEnvironment.getConfig().getConfiguration();
        configuration.setString("table.exec.state.ttl", "5 s");

        //todo  topic_db — register the raw CDC topic as a table
        streamTableEnvironment.executeSql(ZhuKafkaUtil.getTopicDB("order_pre_process"));


        //todo filter five tables out of topic_db
        //todo order_detail (main table)
        Table order_detail_table = streamTableEnvironment.sqlQuery(
                "select " +
                        "data['id'] id," +
                        "data['order_id'] order_id," +
                        "data['sku_id'] sku_id," +
                        "data['sku_name'] sku_name, " +
                        "data['create_time'] create_time, " +
                        "data['operate_time'] operate_time, " +
                        "data['source_id'] source_id, " +
                        "data['source_type'] source_type, " +
                        "data['sku_num'] sku_num, " +
                        // original amount = sku_num * order_price, computed here
                        // because the CDC payload carries only split amounts
                        "cast(cast(data['sku_num'] as decimal(16,2)) * " +
                        "cast(data['order_price'] as decimal(16,2)) as String) split_original_amount," +
                        "data['split_total_amount'] split_total_amount," +
                        "data['split_activity_amount'] split_activity_amount," +
                        "data['split_coupon_amount'] split_coupon_amount," +
                        "pt " +   // processing-time attribute, required for the lookup join below
                        "from `topic_db` " +
                        " where `database` = 'flink' and `table` = 'order_detail'"
        );
        streamTableEnvironment.createTemporaryView("order_detail_table",order_detail_table);
        //test
      //  streamTableEnvironment.toAppendStream(order_detail_table, Row.class).print();

        //todo order_info
        Table order_Info_table = streamTableEnvironment.sqlQuery(
                "select " +
                        "data['id'] id, " +
                        "data['user_id'] user_id, " +
                        "data['province_id'] province_id, " +
                        "data['operate_time'] operate_time, " +
                        "data['order_status'] order_status, " +
                        "`type`, " +
                        "`old` " +
                        "from `topic_db` where `table` = 'order_info' " +
                        "and (`type` = 'insert' or `type` = 'update') "
        );
        streamTableEnvironment.createTemporaryView("order_Info_table",order_Info_table);
      //  streamTableEnvironment.toAppendStream(order_Info_table,Row.class).print();

        //todo order_detail_activity
        Table order_detail_activity = streamTableEnvironment.sqlQuery(
                "select " +
                        "data['order_detail_id'] order_detail_id, " +
                        "data['activity_id'] activity_id, " +
                        "data['activity_rule_id'] activity_rule_id " +
                        "from `topic_db` " +
                        "where `table` = 'order_detail_activity' " +
                        "and `type` = 'insert' "
        );
        streamTableEnvironment.createTemporaryView("order_detail_activity",order_detail_activity);
        // streamTableEnvironment.toAppendStream(order_detail_activity,Row.class).print();

        //todo order_detail_coupon
        Table order_detail_coupon = streamTableEnvironment.sqlQuery(
                "select " +
                        "data['order_detail_id'] order_detail_id, " +
                        "data['coupon_id']  coupon_id " +
                        "from `topic_db` where `table` = 'order_detail_coupon' " +
                        "and `type` = 'insert' "   // this data is never deleted in production
        );
        streamTableEnvironment.createTemporaryView("order_detail_coupon",order_detail_coupon);

        //todo base_dic — MySQL lookup (dimension) table
        streamTableEnvironment.executeSql(MySqlUtil.getBaseDicLooKupDDL());

        //todo join
        // Column order here must match the sink DDL below exactly, because the
        // final statement is `insert into ... select *` (mapped by position).
        Table resultTable = streamTableEnvironment.sqlQuery(
                "select " +
                        "od.id, " +
                        "od.order_id, " +
                        "oi.user_id, " +
                        "oi.order_status, " +
                        "od.sku_id, " +
                        "od.sku_name, " +
                        "oi.province_id, " +
                        "act.activity_id, " +
                        "act.activity_rule_id, " +
                        "cou.coupon_id, " +
                        "date_format(od.create_time,'yyyy-MM-dd') date_id, " +
                        "od.create_time, " +
                        "date_format(od.operate_time,'yyyy-MM-dd') operate_date_id, " +
                        "od.operate_time, " +
                        "od.source_id, " +
                        "od.source_type, " +
                        "dic.dic_name source_type_name, " +
                        // FIX: was `od.sku_name` (a duplicate of column 6), which both
                        // breaks view creation (duplicate column name) and shifts the
                        // positional mapping into the sink's `sku_num` column.
                        "od.sku_num, " +
                        "od.split_original_amount, " +
                        "od.split_activity_amount, " +
                        "od.split_coupon_amount, " +
                        "od.split_total_amount, " +
                        "oi.`type`, " +
                        "oi.`old`, " +
                        "current_row_timestamp() row_op_ts " +
                        "from " +
                        "order_detail_table od join order_Info_table oi on od.order_id = oi.id " +
                        "left join order_detail_activity act on od.id = act.order_detail_id " +
                        "left join order_detail_coupon cou on od.id = cou.order_detail_id " +
                        // processing-time temporal (lookup) join against MySQL
                        "join `base_dic` for system_time as of od.pt as dic " +
                        "on od.source_type = dic.dic_code "
        );
        streamTableEnvironment.createTemporaryView("result_Table",resultTable);
      //  streamTableEnvironment.toRetractStream(resultTable, Row.class).print();

        //todo write the joined result to Kafka
        streamTableEnvironment.executeSql(
                "create table dwd_trade_order_pre_process( " +
                        "id string, " +
                        "order_id string, " +
                        "user_id string, " +
                        "order_status string," +
                        "sku_id string, " +
                        "sku_name string, " +
                        "province_id string, " +
                        "activity_id string, " +
                        "activity_rule_id string, " +
                        "coupon_id string, " +
                        "date_id string, " +
                        "create_time string, " +
                        "operate_date_id string, " +
                        "operate_time string, " +
                        "source_id string, " +
                        "source_type string, " +
                        "source_type_name string, " +
                        "sku_num string, " +
                        "split_original_amount string, " +
                        "split_activity_amount string, " +
                        "split_coupon_amount string, " +
                        "split_total_amount string, " +
                        "`type` string, " +
                        "`old` map<string,string>, " +
                        "row_op_ts timestamp_ltz(3), " +
                        "primary key(id) not enforced " +   // upsert key for the retract stream
                        ") " +  ZhuKafkaUtil.getKafkaUpsertSinkDDL("dwd_trade_order_pre_process")
                // Retractions surface in Kafka as null-value records; a custom
                // deserializer must handle them downstream.
        );
        // executeSql submits the INSERT job to the cluster by itself; no
        // explicit env.execute() is needed (and calling it here would throw
        // "No operators defined in streaming topology" because this job builds
        // no DataStream operators — the toAppendStream calls are commented out).
        streamTableEnvironment.executeSql("insert into dwd_trade_order_pre_process " +
                " select * from result_Table"
                ).print();
    }
}
