package app.dwd;

import common.Constant;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import util.SQLUtil;

/**
 * DWD-layer streaming job: extracts "add to cart" events from the ODS
 * change-log topic ({@code ods_db}) and writes them to the DWD
 * trade-cart-add Kafka topic.
 *
 * <p>Pipeline: read ods_db → filter inserts into {@code edu.cart_info}
 * → write rows to {@code dwd_trade_cart_add} via the Kafka sink connector.
 *
 * @author cheng
 * @create 2022-10-25 21:21
 */
public class Dwd_TradeCartAdd extends BaseSQLApp {

    /** Job name, also used as the Kafka consumer-group id — kept in one place so the two never drift apart. */
    private static final String APP_NAME = "Dwd_TradeCartAdd";

    public static void main(String[] args) {
        // init(port, parallelism, groupId/jobName)
        new Dwd_TradeCartAdd().init(
                3010,
                2,
                APP_NAME);
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // 1. Register the ODS change-log source table (ods_db) via DDL.
        //    Note: SQL `print` on this table would block, so no debug query here.
        readOdsDb(tEnv, APP_NAME);

        // 2. Filter cart-add events: rows of type 'insert' on edu.cart_info.
        //    Each insert represents one added item, hence the literal course_num = 1.
        //    NOTE(review): updates that increase an existing cart line's quantity are
        //    not captured here — confirm upstream guarantees inserts only, if relevant.
        Table cartAdd = tEnv.sqlQuery("select " +
                " `data`['id'] id, " +
                " `data`['user_id'] user_id, " +
                " `data`['course_id'] course_id, " +
                " `data`['course_name'] course_name, " +
                " `data`['cart_price'] cart_price," +
                " 1 course_num, " +
                " ts " +
                "from ods_db " +
                "where `database`='edu' " +
                "and `table`='cart_info' " +
                "and `type`='insert' ");

        // 3. Declare the Kafka sink table and write the filtered rows out.
        //    Column types mirror the string-typed Maxwell/Canal `data` map values.
        tEnv.executeSql("create table dwd_trade_cart_add( " +
                "id string, " +
                "user_id string, " +
                "course_id string, " +
                "course_name string, " +
                "cart_price string, " +
                "course_num int, " +
                "ts bigint " +
                ")" + SQLUtil.getKafkaSink(Constant.TOPIC_DWD_TRADE_CART_ADD));

        cartAdd.executeInsert("dwd_trade_cart_add");
    }

}
