package com.hadoop.gmall.realtime.dwd.db.app;

import com.bw.gmall.realtime.common.base.BaseSqlApp;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job: trade cart-add fact table.
 *
 * <p>Reads change-log records from the ODS {@code topic_db} Kafka topic,
 * keeps only {@code cart_info} rows that represent an addition to the cart
 * (new inserts, or updates where {@code sku_num} increased), computes the
 * added quantity, and writes the result to the
 * {@code Constant.TOPIC_DWD_TRADE_CART_ADD} Kafka topic.
 *
 * <p>NOTE(review): package {@code com.hadoop.gmall...} does not match the
 * {@code com.bw.gmall...} imports — confirm the intended package root.
 */
public class DwdTradeCartAdd extends BaseSqlApp {

    public static void main(String[] args) throws Exception {
        // port 10013, parallelism 4; the third argument doubles as the
        // checkpoint/consumer-group id and is handed back to handle() as groupId.
        new DwdTradeCartAdd().start(10013, 4, Constant.TOPIC_DWD_TRADE_CART_ADD);
    }

    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv, String groupId) {
        // 1. Register the ODS topic_db source table, consuming with this app's
        //    group id. (Previously the sink-topic constant was passed here and
        //    the groupId parameter was ignored — identical value today, but it
        //    would silently break if group id and sink topic ever diverge.)
        readOdsDb(tableEnv, groupId);

        // 2. Filter cart-add events out of the cart_info change log:
        //    - type='insert'  -> a new cart row; sku_num itself is the added quantity.
        //    - type='update'  -> only when sku_num increased; added quantity is
        //                        new sku_num minus old sku_num.
        //    e.g. for one (user_id, sku_id): 2 -> 3 emits a delta of 1.
        Table cartInfo = tableEnv.sqlQuery("select\n" +
                "  `data`['id'] id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['sku_id'] sku_id,\n" +
                "  `data`['cart_price'] cart_price,\n" +
                // inserts keep sku_num as-is; updates emit the increase (new - old)
                "   if(`type`='insert',`data`['sku_num'],cast(cast(`data`['sku_num'] as bigint) - cast(`old`['sku_num'] as bigint) as string) ) sku_num,\n" +
                "  `data`['sku_name'] sku_name,\n" +
                "  `data`['is_checked'] is_checked,\n" +
                "  `data`['create_time'] create_time,\n" +
                "  `data`['operate_time'] operate_time,\n" +
                "  `data`['is_ordered'] is_ordered,\n" +
                "  `data`['order_time'] order_time,\n" +
                "   ts \n" +
                "from topic_db\n" +
                "where `database`='gmall'\n" +
                "and  `table`='cart_info'\n" +
                // `old`['sku_num'] is not null guards against updates that did
                // not touch sku_num (old only carries changed columns)
                "and (`type`='insert' or (`type`='update'  and `old`['sku_num'] is not null\n" +
                "and cast(`data`['sku_num'] as bigint) > cast(`old`['sku_num'] as bigint)))\n" +
                " ");

        // 3. Declare the Kafka sink table; schema mirrors the query above.
        tableEnv.executeSql("create table "+Constant.TOPIC_DWD_TRADE_CART_ADD+" (\n" +
                "  id STRING,\n" +
                "  user_id STRING,\n" +
                "  sku_id STRING,\n" +
                "  cart_price STRING,\n" +
                "  sku_num STRING,\n" +
                "  sku_name STRING,\n" +
                "  is_checked STRING,\n" +
                "  create_time STRING,\n" +
                "  operate_time STRING,\n" +
                "  is_ordered STRING,\n" +
                "  order_time STRING,\n" +
                "  ts  BIGINT\n" +
                ")"+ SQLUtil.getKafkaSinkSQL(Constant.TOPIC_DWD_TRADE_CART_ADD));

        // 4. Stream the filtered cart-add rows into the Kafka sink topic.
        cartInfo.insertInto(Constant.TOPIC_DWD_TRADE_CART_ADD).execute();
    }
}
