package cn._51doit.live.jobs;

import cn._51doit.live.utils.FlinkUtils;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Reads user-event records from Kafka, keeps only live-reward events, enriches
 * them with the MySQL gift dimension table via a processing-time lookup join,
 * and aggregates reward counts and points per anchor with CUBE roll-ups over
 * release channel and OS name. Results are printed to stdout.
 *
 * <p>Usage: {@code SQLGiftCount <path-to-properties-file>}. Connection settings
 * default to the original hard-coded values and may be overridden through the
 * properties file (keys: kafka.topic, kafka.bootstrap.servers, kafka.group.id,
 * jdbc.url, jdbc.username, jdbc.password, checkpoint.interval).
 */
public class SQLGiftCount {

    public static void main(String[] args) throws Exception {
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: SQLGiftCount <path-to-properties-file>");
        }

        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);
        StreamTableEnvironment tEnv = FlinkUtils.getStreamTableEnv();
        // Checkpointing lets the Kafka source commit offsets consistently with state.
        FlinkUtils.env.enableCheckpointing(parameterTool.getInt("checkpoint.interval", 60000));

        registerKafkaSource(tEnv, parameterTool);
        registerGiftDimension(tEnv, parameterTool);
        registerRewardView(tEnv);

        // Aggregate per anchor; CUBE additionally emits every roll-up combination
        // of (releaseChannel, osName), including the grand total per anchor.
        TableResult tableResult = tEnv.executeSql(
                "select anchor_id, releaseChannel, osName, count(*) total_counts, sum(points) total_points from v_tmp group by anchor_id, cube (releaseChannel, osName)"
        );

        tableResult.print();
    }

    /**
     * Registers the Kafka source table of raw user events.
     * Topic, bootstrap servers and consumer group default to the original
     * hard-coded values but may be overridden via the job properties file.
     */
    private static void registerKafkaSource(StreamTableEnvironment tEnv, ParameterTool params) {
        tEnv.executeSql(
                "CREATE TABLE tb_user_event (\n" +
                        "  `carrier` STRING,  \n" +
                        "  `deviceId` STRING,  \n" +
                        "  `deviceType` STRING,  \n" +
                        "  `eventId` STRING,  \n" +
                        "  `isNew` INT,  \n" +
                        "  `latitude` DOUBLE,  \n" +
                        "  `longitude` DOUBLE,  \n" +
                        "  `netType` STRING,  \n" +
                        "  `osName` STRING,  \n" +
                        "  `osVersion` STRING,  \n" +
                        "  `properties` MAP<STRING, STRING>,  \n" +
                        "  `releaseChannel` STRING,  \n" +
                        "  `resolution` STRING,  \n" +
                        "  `sessionId` STRING,  \n" +
                        "  `timestamp` BIGINT, \n" +
                        // Processing-time attribute required by the temporal lookup join.
                        "  proctime as PROCTIME() \n" +
                        ") WITH (\n" +
                        "  'connector' = 'kafka',\n" +
                        "  'topic' = '" + params.get("kafka.topic", "user-log-28") + "',\n" +
                        "  'properties.bootstrap.servers' = '" + params.get("kafka.bootstrap.servers",
                                "node-1.51doit.cn:9092,node-2.51doit.cn:9092,node-3.51doit.cn:9092") + "',\n" +
                        "  'properties.group.id' = '" + params.get("kafka.group.id", "testGroup") + "',\n" +
                        "  'scan.startup.mode' = 'earliest-offset',\n" +
                        "  'format' = 'json', \n" +
                        // Skip malformed JSON records instead of failing the job.
                        "  'json.ignore-parse-errors' = 'true'" +
                        ")"
        );
    }

    /**
     * Registers the MySQL gift dimension table used as a lookup (temporal) source.
     * Credentials default to the original values; prefer overriding them via the
     * properties file rather than keeping secrets in source code.
     */
    private static void registerGiftDimension(StreamTableEnvironment tEnv, ParameterTool params) {
        tEnv.executeSql(
                "CREATE TABLE gift_category_dim (\n" +
                        "    id INT,\n" +
                        "    name STRING, \n" +
                        "    points DOUBLE, \n" +
                        "    deleted INT \n" +
                        ") WITH (\n" +
                        "    'connector' = 'jdbc',\n" +
                        "    'url' = '" + params.get("jdbc.url",
                                "jdbc:mysql://node-1.51doit.cn:3306/doit28?characterEncoding=utf-8") + "',\n" +
                        "    'table-name' = 'tb_live_gift',\n" +
                        // NOTE(review): 'com.mysql.jdbc.Driver' is the legacy Connector/J 5.x class;
                        // switch to 'com.mysql.cj.jdbc.Driver' if the 8.x driver is on the classpath.
                        "    'driver' = 'com.mysql.jdbc.Driver',\n" +
                        "    'username' = '" + params.get("jdbc.username", "root") + "',\n" +
                        "    'password' = '" + params.get("jdbc.password", "123456") + "',\n" +
                        "    'lookup.cache.max-rows' = '5000',\n" + // max dimension rows cached in Flink
                        "    'lookup.cache.ttl' = '1min'\n" +       // TTL of cached dimension rows
                        ")"
        );
    }

    /**
     * Creates a view that filters 'liveReward' events from the source table and
     * enriches each one with the gift's name and points via a processing-time
     * lookup join on the gift id carried in the event properties.
     */
    private static void registerRewardView(StreamTableEnvironment tEnv) {
        tEnv.executeSql(
                "CREATE VIEW v_tmp AS\n" +
                        "SELECT\n" +
                        "  t1.properties['anchor_id'] anchor_id, \n" +
                        "  t1.releaseChannel , \n" +
                        "  t1.osName, \n" +
                        "  t2.name, \n" +
                        "  t2.points \n" +
                        "FROM (select * from tb_user_event where eventId = 'liveReward') AS t1 LEFT JOIN gift_category_dim FOR SYSTEM_TIME AS OF t1.proctime AS t2 \n" +
                        "ON cast(t1.properties['gift_id'] as int) = t2.id"
        );
    }
}
