package com.zhu.app.dwd;

import com.zhu.utils.MySqlUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * DWD job: successful-refund payment facts.
 *
 * <p>Joins refund_payment (the measure source) with order_info (to obtain
 * user_id and province_id) and order_refund_info (to obtain refund_num),
 * enriches payment_type via a lookup join on the MySQL base_dic dimension,
 * and writes the result to the Kafka topic {@code dwd_trade_refund_pay_suc}.
 *
 * <p>When a refund completes, order_info.order_status is updated to 1006
 * (refund complete). A successful refund therefore corresponds to an
 * order_info change event satisfying three conditions: (1) the operation
 * type is update; (2) order_status is 1006; (3) the order_status field was
 * modified. Because order_status never changes again once it reaches 1006,
 * the first two conditions imply the third.
 */
public class DWDTradeRefundPaySucApp {

    public static void main(String[] args) throws Exception {

        // todo env — parallelism matches the partition count of the source Kafka topic (4)
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        streamExecutionEnvironment.setParallelism(4);
        StreamTableEnvironment streamTableEnvironment = StreamTableEnvironment.create(streamExecutionEnvironment);

        // Out-of-order data: keep join state alive for 5 s so slightly late rows can still match.
        Configuration configuration = streamTableEnvironment.getConfig().getConfiguration();
        configuration.setString("table.exec.state.ttl", "5 s");

        // Checkpointing (disabled here, e.g. for local runs)
          /*
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);  // checkpoints stored on HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);  // timeout
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);  // max concurrent checkpoints
        streamExecutionEnvironment.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L));  // restart strategy
        */

        // todo Kafka source table over the business change-log topic (topic_db)
        streamTableEnvironment.executeSql(ZhuKafkaUtil.getTopicDB("dwd_trade_refund_pay_suc"));

        // todo MySQL lookup dimension table base_dic
        streamTableEnvironment.executeSql(MySqlUtil.getBaseDicLooKupDDL());

        // todo refund_payment: keep only updates where refund_status became 0702
        // (refund payment succeeded). Filtering on `old` ensures the status field
        // was actually modified by this change event.
        Table refund_payment = streamTableEnvironment.sqlQuery("" +
                "select " +
                "data['id'] id, " +
                "data['order_id'] order_id, " +
                "data['sku_id'] sku_id, " +  // finest grain of this fact table
                "data['payment_type'] payment_type, " +
                "data['callback_time'] callback_time, " +
                "data['total_amount'] total_amount, " +
                "pt " +
                "from topic_db " +
                "where `table` = 'refund_payment' " +
                "and `type` = 'update' " +
                "and data['refund_status'] = '0702' " +
                "and `old`['refund_status'] is not null "
        );
        streamTableEnvironment.createTemporaryView("refund_payment", refund_payment);

        // todo order_refund_info: keep only updates where refund_status became
        // 0705 (refund completed) — supplies refund_num.
        Table order_refund_info = streamTableEnvironment.sqlQuery("" +
                "select " +
                "data['order_id'] order_id, " +
                "data['sku_id'] sku_id, " +
                "data['refund_num'] refund_num, " +
                "`old` " +
                "from topic_db " +
                "where `table` = 'order_refund_info' " +
                "and `type` = 'update' " +
                "and `data`['refund_status'] = '0705' " +
                "and `old`['refund_status'] is not null "
        );
        streamTableEnvironment.createTemporaryView("order_refund_info", order_refund_info);

        // todo order_info: refund-complete orders (order_status updated to 1006).
        // Refund rows always originate from an order, so the join below cannot miss.
        Table order_info = streamTableEnvironment.sqlQuery(
                "select " +
                        "data['id'] id,\n" +
                        "data['user_id'] user_id,\n" +
                        "data['province_id'] province_id,\n" +
                        "`old` " +
                        "from topic_db " +
                        "where `table` = 'order_info' and `type` = 'update' and " +
                        "data['order_status'] = '1006' and `old`['order_status'] is not null "
        );
        streamTableEnvironment.createTemporaryView("order_info", order_info);

        // todo join the three change streams plus the dictionary lookup
        Table result = streamTableEnvironment.sqlQuery(
                "select " +
                        "rp.id,\n" +
                        "oi.user_id,\n" +
                        "rp.order_id,\n" +
                        "rp.sku_id,\n" +
                        "oi.province_id,\n" +
                        "rp.payment_type,\n" +
                        "dic.dic_name payment_type_name,\n" +
                        "date_format(rp.callback_time,'yyyy-MM-dd') date_id,\n" +
                        "rp.callback_time,\n" +
                        "ri.refund_num,\n" +
                        "rp.total_amount,\n" +
                        "current_row_timestamp() row_op_ts\n" +
                        " from refund_payment rp " +
                        "join order_info oi " +
                        "on rp.order_id = oi.id " +
                        "join " +
                        "order_refund_info ri " +
                        "on rp.sku_id = ri.sku_id " +
                        // processing-time lookup join against MySQL base_dic
                        "join base_dic for system_time as of rp.pt as dic " +
                        "on rp.payment_type = dic.dic_code "
        );
        streamTableEnvironment.createTemporaryView("result_table", result);

        // TODO 9. Kafka-connector sink table dwd_trade_refund_pay_suc
        // (columns map positionally onto the SELECT above)
        streamTableEnvironment.executeSql("create table dwd_trade_refund_pay_suc(\n" +
                        "id string,\n" +
                                "user_id string,\n" +
                                "order_id string,\n" +
                                "sku_id string,\n" +
                                "province_id string,\n" +
                                "payment_type_code string,\n" +
                                "payment_type_name string,\n" +
                                "date_id string,\n" +
                                "callback_time string,\n" +
                                "refund_num string,\n" +
                                "refund_amount string,\n" +
                                "row_op_ts timestamp_ltz(3) " +
                                ")" +
                                ZhuKafkaUtil.getKafkaSinkDDL("dwd_trade_refund_pay_suc"));

        // todo write to Kafka topic dwd_trade_refund_pay_suc.
        // executeSql(INSERT ...) submits the Table API job itself, so no
        // separate streamExecutionEnvironment.execute() call is needed.
        streamTableEnvironment.executeSql(
                "insert into dwd_trade_refund_pay_suc select * from result_table "
        );
    }
}
