package com.atguigu.gmall.realtime.app.demo;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * Demo: Flink SQL lookup (temporal) join.
 *
 * <p>Enriches an order stream from Kafka with a dictionary dimension table
 * stored in MySQL, then writes the widened rows to an upsert-kafka sink.
 *
 * <p>Sample order event:
 * {@code {"id":"101","user_id":"u_101","status":"1001","amount":200,"ts":9900}}
 *
 * <p>Dimension table: {@code gmall.base_dic} (MySQL, read via the JDBC connector
 * with a lookup cache).
 */
public class FlinkSQLLookupJoinApp {

    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: order stream from Kafka.
        // The stream side of a lookup join must carry a processing-time
        // attribute, declared here as a computed column via PROCTIME().
        String createOrderInfoSQL =
                "CREATE TABLE order_info (\n" +
                "  `id` STRING,\n" +
                "  `user_id` STRING,\n" +
                "  `status` STRING,\n" +
                "  `amount` STRING,\n" +
                "  `ts` STRING,\n" +
                "  `proc_time` as proctime() \n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'demo_left_topic',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'flink_sql_join',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json'\n" +
                ")";
        tableEnv.executeSql(createOrderInfoSQL);

        // Dimension: dictionary table in MySQL.
        // The lookup cache (max-rows / ttl) avoids hitting MySQL on every probe;
        // cached entries may be up to 60s stale.
        String createBaseDicSQL =
                "CREATE TABLE base_dic (\n" +
                "  dic_code STRING,\n" +
                "  dic_name STRING,\n" +
                "  PRIMARY KEY (dic_code) NOT ENFORCED\n" +
                ") WITH (\n" +
                "  'connector' = 'jdbc',\n" +
                "  'url' = 'jdbc:mysql://hadoop102:3306/gmall',\n" +
                "  'table-name' = 'base_dic',\n" +
                "  'username' = 'root',\n" +
                "  'password' = '000000', \n" +
                "  'lookup.cache.max-rows' ='1000',\n" +
                "  'lookup.cache.ttl' ='60s'\n" +
                ") ";
        tableEnv.executeSql(createBaseDicSQL);

        // Lookup join: FOR SYSTEM_TIME AS OF o.proc_time joins each order row
        // against the dimension table as of the row's processing time.
        String lookupJoinSQL =
                "SELECT id,user_id,status,dic_name as status_name,amount,ts\n" +
                "FROM order_info AS o\n" +
                "JOIN base_dic FOR SYSTEM_TIME AS OF o.proc_time AS dic\n" +
                "  ON o.status = dic.dic_code";
        Table lookupJoinTable = tableEnv.sqlQuery(lookupJoinSQL);

        // Sink: upsert-kafka keyed on order id, so late updates to the same
        // order overwrite the previous wide record.
        String createSinkTableSQL =
                "CREATE TABLE order_wide (\n" +
                "  id STRING,\n" +
                "  user_id STRING,\n" +
                "  `status` STRING,\n" +
                "  status_name STRING,\n" +
                "  amount STRING,\n" +
                "  `ts` STRING,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = 'demo_order_wide',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json'\n" +
                ")  ";
        tableEnv.executeSql(createSinkTableSQL);

        // Use the Table API's executeInsert instead of concatenating the Table
        // object into a SQL string (which relied on Table.toString()'s implicit
        // anonymous registration). This submits the streaming job.
        lookupJoinTable.executeInsert("order_wide");
    }
}
