package com.atguigu.edu.realtime.app.dwd;

import com.atguigu.edu.realtime.util.MySqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD layer job: builds the "trade cart add" fact table (dwd_trade_cart_add).
 *
 * <p>Pipeline (pure Flink SQL, no DataStream transformations):
 * <ol>
 *   <li>Read the CDC change-log topic {@code topic_db} from Kafka.</li>
 *   <li>Filter to {@code cart_info} INSERT events and project the cart fields.</li>
 *   <li>Write the result to the Kafka topic {@code dwd_trade_cart_add} via the
 *       upsert-kafka connector (keyed by {@code id}).</li>
 * </ol>
 */
public class DwdTradeCartAddApp { // cart-add fact table: dwd_trade_cart_add

    public static void main(String[] args) {
        // 0. Environments: streaming env + table env (SQL-only job, so no
        //    explicit env.execute() is needed — executeSql submits the insert).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 1. Source: CDC change-log topic `topic_db` (JSON), exposing the raw
        //    database/table/type/data/old envelope plus a processing-time column.
        String createTopicDbSQL = "     CREATE TABLE topic_db (\n" +
                "        `database` STRING,\n" +
                "       `table` STRING,\n" +
                "        `type` STRING,\n" +
                "         `data` MAP<STRING,STRING>,\n" +
                "         `old` MAP<STRING,STRING> ,\n" +
                "          `proc_time`  as proctime(), \n" +
                "         ts   STRING \n" +
                "          ) WITH (\n" +
                "          'connector' = 'kafka',\n" +
                "          'topic' = 'topic_db',\n" +
                "          'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "          'properties.group.id' = 'dwd_trade_cart_add_app',\n" +
                "          'scan.startup.mode' = 'group-offsets',\n" +
                "           'format' = 'json'\n" +
                "         )";
        tableEnv.executeSql(createTopicDbSQL);

        // 2. Transform: keep only INSERTs on cart_info and flatten the `data`
        //    map into named columns; register as a view for the insert below.
        String selectCartAddSQL = "select \n" +
                "`data`['id'] id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['course_id'] course_id,\n" +
                "`data`['course_name'] course_name,\n" +
                "`data`['cart_price'] cart_price,\n" +
                "`data`['img_url'] img_url,\n" +
                "`data`['session_id'] session_id,\n" +
                "`data`['create_time'] create_time,\n" +
                "`data`['update_time'] update_time,\n" +
                "`data`['deleted'] deleted,\n" +
                "`data`['sold'] sold,\n" +
                " proc_time ,ts \n" +
                " from\n" +
                "         topic_db where `table`='cart_info' and  type= 'insert'\n";
        Table cartAddTable = tableEnv.sqlQuery(selectCartAddSQL);
        tableEnv.createTemporaryView("cart_add_info", cartAddTable); // temporary view

        // 3. Sink: dwd_trade_cart_add via upsert-kafka, keyed by the cart row id.
        tableEnv.executeSql("" +
                "create table dwd_trade_cart_add(\n" +
                "id string,\n" +
                "user_id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "cart_price string,\n" +
                "ts string,\n" +
                "PRIMARY KEY(id) NOT ENFORCED \n" +
                ") with( " +
                "    'connector' = 'upsert-kafka', \n" +
                "                      'topic' = 'dwd_trade_cart_add', \n" +
                "                      'properties.bootstrap.servers' = 'hadoop102:9092', \n" +
                "                          'key.format' = 'json' , \n" +
                "                        'value.format' = 'json'  ) ");

        // 4. Write to the sink. BUG FIX: the original selected only `ts`, which
        //    fails SQL validation because the sink declares six columns — the
        //    projection must match the sink schema exactly.
        tableEnv.executeSql("" +
                "insert into dwd_trade_cart_add " +
                "select id, user_id, course_id, course_name, cart_price, ts " +
                "from cart_add_info");
    }
}




















