package com.zy.gmall.realtime.app.dwd.db;

import com.zy.gmall.realtime.util.HbaseUtil;
import com.zy.gmall.realtime.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink job: extracts "add to cart" events from the ODS changelog
 * stream (topic_db) and writes them to the dwd_trade_cart_add Kafka topic.
 *
 * An "add to cart" is either a fresh insert into cart_info, or an update
 * that increased sku_num (in which case only the delta counts as added).
 */
public class DwdTradeCartAdd {
    public static void main(String[] args) {
        // 1. Streaming environment + Table API wrapper.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Register the ODS source table (topic_db) from Kafka.
        //    NOTE(review): the argument is presumably the consumer group id —
        //    confirm against KafkaUtil.getKafkaSourceTable.
        tableEnv.executeSql(KafkaUtil.getKafkaSourceTable("dwd_trade_cart_add"));

        // 3. Filter the cart_info changelog down to genuine cart additions:
        //    - inserts keep sku_num as-is;
        //    - updates qualify only when sku_num increased, and the emitted
        //      quantity is the delta (new - old), cast back to string to keep
        //      a uniform column type across both branches.
        Table cartInfo = tableEnv.sqlQuery("select \n" +
                "    `data`['id'] id,\n" +
                "    `data`['user_id'] user_id,\n" +
                "    `data`['sku_id'] sku_id,\n" +
                "    if(`type`='insert',`data`['sku_num'],CAST((CAST(`data`['sku_num'] AS INT) - CAST(`old`['sku_num'] AS INT)) AS string)) sku_num,\n" +
                "    ts\n" +
                "from topic_db where `table`='cart_info' and (`type`='insert' or \n" +
                "(`type`='update' and `old`['sku_num'] is not null and CAST(`data`['sku_num'] AS INT) > CAST(`old`['sku_num'] AS INT)))");
        tableEnv.createTemporaryView("cart_info", cartInfo);

        // 4. Declare the DWD sink as an upsert-kafka table keyed by cart id.
        //    BUGFIX: the connector previously pointed at the leftover debug
        //    topic "test"; it now writes to "dwd_trade_cart_add", matching the
        //    table name and this job's target layer.
        tableEnv.executeSql("CREATE TABLE dwd_trade_cart_add (\n" +
                "  id string,\n" +
                "  user_id string,\n" +
                "  sku_id string,\n" +
                "  sku_num string,\n" +
                "  ts string,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") " + KafkaUtil.getUpsertKafkaSinkConnector("dwd_trade_cart_add"));

        // 5. Submit the continuous INSERT job. executeSql launches it
        //    asynchronously; no env.execute() is needed for a pure
        //    Table API pipeline.
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from cart_info");
    }
}
