package com.atguigu.gmall.realtime.dwd.db.app;

import com.atguigu.gmall.realtime.common.base.BaseSQLApp;
import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.common.util.SQLUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @author Felix
 * @date 2024/6/05
 * DWD fact table: successful refund payment detail.
 * Required running processes:
 * zk, kafka, maxwell, hdfs, hbase, DwdTradeRefundPaySucDetail
 */
public class DwdTradeRefundPaySucDetail extends BaseSQLApp {
	public static void main(String[] args) {
		new DwdTradeRefundPaySucDetail().start(
				10018,
				4,
				Constant.TOPIC_DWD_TRADE_REFUND_PAYMENT_SUCCESS
		);
	}

	/**
	 * Builds the refund-payment-success detail stream: filters the
	 * refund_payment / order_refund_info / order_info change-log rows from
	 * topic_db, enriches them via a temporal lookup join against base_dic,
	 * and writes the joined result to an upsert-kafka sink topic.
	 *
	 * @param tEnv the stream table environment supplied by {@code BaseSQLApp}
	 */
	@Override
	public void handle(StreamTableEnvironment tEnv) {
		// The change-log rows that belong to one refund arrive within moments
		// of each other, so a short retention keeps the regular-join state
		// small without dropping matches.
		tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(5));

		// 1. Register topic_db (MySQL change log captured by Maxwell).
		readOdsDb(tEnv, Constant.TOPIC_DWD_TRADE_REFUND_PAYMENT_SUCCESS);
		// 2. Register the base_dic dictionary lookup table (HBase).
		readBaseDic(tEnv);

		// 3. refund_payment rows whose refund_status was just updated to
		//    1602 (refund payment succeeded). `old` carrying the column
		//    proves the status actually changed in this update.
		Table refundPayment = tEnv.sqlQuery(
				"select " +
						"data['id'] id," +
						"data['order_id'] order_id," +
						"data['sku_id'] sku_id," +
						"data['payment_type'] payment_type," +
						"data['callback_time'] callback_time," +
						"data['total_amount'] total_amount," +
						"pt, " +
						"ts " +
						"from topic_db " +
						"where `table`='refund_payment' " +
						"and `type`='update' " +
						"and `old`['refund_status'] is not null " +
						"and `data`['refund_status']='1602'");
		tEnv.createTemporaryView("refund_payment", refundPayment);

		// 4. order_refund_info rows whose refund_status was just updated to
		//    0705 (refund completed).
		Table orderRefundInfo = tEnv.sqlQuery(
				"select " +
						"data['order_id'] order_id," +
						"data['sku_id'] sku_id," +
						"data['refund_num'] refund_num " +
						"from topic_db " +
						"where `table`='order_refund_info' " +
						"and `type`='update' " +
						"and `old`['refund_status'] is not null " +
						"and `data`['refund_status']='0705'");
		tEnv.createTemporaryView("order_refund_info", orderRefundInfo);

		// 5. order_info rows whose order_status was just updated to
		//    1006 (refund completed).
		Table orderInfo = tEnv.sqlQuery(
				"select " +
						"data['id'] id," +
						"data['user_id'] user_id," +
						"data['province_id'] province_id " +
						"from topic_db " +
						"where `table`='order_info' " +
						"and `type`='update' " +
						"and `old`['order_status'] is not null " +
						"and `data`['order_status']='1006'");
		tEnv.createTemporaryView("order_info", orderInfo);

		// 6. Join the three filtered streams, plus a temporal lookup join on
		//    base_dic to resolve the payment-type name. Output aliases are
		//    kept identical to the sink schema below: executeInsert maps by
		//    position, but aligned names prevent silent column-order mistakes.
		Table result = tEnv.sqlQuery(
				"select " +
						"rp.id," +
						"oi.user_id," +
						"rp.order_id," +
						"rp.sku_id," +
						"oi.province_id," +
						"rp.payment_type payment_type_code," +
						"dic.info.dic_name payment_type_name," +
						"date_format(rp.callback_time,'yyyy-MM-dd') date_id," +
						"rp.callback_time," +
						"ori.refund_num," +
						"rp.total_amount refund_amount," +
						"rp.ts " +
						"from refund_payment rp " +
						"join order_refund_info ori " +
						"on rp.order_id=ori.order_id and rp.sku_id=ori.sku_id " +
						"join order_info oi " +
						"on rp.order_id=oi.id " +
						"join base_dic for system_time as of rp.pt as dic " +
						"on rp.payment_type=dic.dic_code ");

		// 7. Upsert-kafka sink keyed by the refund_payment id, so retractions
		//    produced by the regular joins collapse into upserts per refund.
		tEnv.executeSql("create table " + Constant.TOPIC_DWD_TRADE_REFUND_PAYMENT_SUCCESS + "(" +
				"id string," +
				"user_id string," +
				"order_id string," +
				"sku_id string," +
				"province_id string," +
				"payment_type_code string," +
				"payment_type_name string," +
				"date_id string," +
				"callback_time string," +
				"refund_num string," +
				"refund_amount string," +
				"ts bigint ," +
				"PRIMARY KEY (id) NOT ENFORCED " +
				")" + SQLUtil.getUpsertKafkaDDL(Constant.TOPIC_DWD_TRADE_REFUND_PAYMENT_SUCCESS));
		result.executeInsert(Constant.TOPIC_DWD_TRADE_REFUND_PAYMENT_SUCCESS);
	}
}
