package com.hu.flink12.api.sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author: hujianjun
 * @Create Date: 2021/2/20 13:55
 * @Describe: 实时获取kafka数据并解析存入mysql中
 */
public class ReadKafka2Mysql {
    /**
     * Streaming job: reads user-behavior events from Kafka, aggregates per-day
     * PV/UV counts with Flink SQL, and upserts the result into MySQL through the
     * JDBC connector.
     *
     * <p>Flow: register Kafka source table → register JDBC sink table → submit an
     * INSERT INTO ... SELECT statement. In Flink 1.12 the INSERT submitted via
     * {@code executeSql} is itself the job; no DataStream operators are involved.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings environmentSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, environmentSettings);

        // Kafka source table, declared with the legacy "connector.*" descriptor
        // properties (pre-FLIP-122 style). Reads JSON records from topic
        // 'user_behavior' starting at the earliest offset.
        String flinkKafkaDDL = "CREATE TABLE user_log (\n" +
                "    user_id VARCHAR,\n" +
                "    item_id VARCHAR,\n" +
                "    category_id VARCHAR,\n" +
                "    behavior VARCHAR,\n" +
                "    ts VARCHAR\n" +
                ") WITH (\n" +
                "    'connector.type' = 'kafka', -- 使用 kafka connector\n" +
                "    'connector.version' = 'universal',  -- kafka 版本，universal 支持 0.11 以上的版本\n" +
                "    'connector.topic' = 'user_behavior',  -- kafka topic\n" +
                "    'connector.startup-mode' = 'earliest-offset', -- 从起始 offset 开始读取\n" +
                "    'connector.properties.0.key' = 'zookeeper.connect',  -- 连接信息\n" +
                "    'connector.properties.0.value' = 'localhost:2181', \n" +
                "    'connector.properties.1.key' = 'bootstrap.servers',\n" +
                "    'connector.properties.1.value' = 'localhost:9092', \n" +
                "    'update-mode' = 'append',\n" +
                "    'format.type' = 'json',  -- 数据源格式为 json\n" +
                "    'format.derive-schema' = 'true' -- 从 DDL schema 确定 json 解析规则\n" +
                ")";

        tableEnv.executeSql(flinkKafkaDDL);

        // JDBC sink table (new "connector" option style). The PRIMARY KEY makes the
        // JDBC connector run in upsert mode, so each day's row is updated in place.
        // Corresponding MySQL table: pv_uv_mysql(day_str, pv_cnt, uv_cnt).
        String flinkMysqlDDL = "CREATE TABLE pv_uv_flink (\n" +
                "\tday_str string\n" +
                "\t,pv_cnt bigint\n" +
                "\t,uv_cnt bigint\n" +
                "\t,PRIMARY KEY (day_str) not enforced\n" +
                ")with (\n" +
                "\t'connector' = 'jdbc'\n" +
                "\t,'url' = 'jdbc:mysql://localhost:3306/test'\n" +
                "\t,'username' = 'root'\n" +
                "\t,'password' = 'root'\n" +
                "\t,'table-name' = 'pv_uv_mysql'\n" +
                ")";
        // Register the MySQL-backed sink table in Flink's catalog.
        tableEnv.executeSql(flinkMysqlDDL);

        // Continuous aggregation: group Kafka events by day (first 10 chars of ts)
        // and write the running counts to MySQL.
        // NOTE(review): uv_cnt counts rows whose behavior literal is 'uv'; a true
        // unique-visitor count would normally be count(distinct user_id) — confirm
        // the intended semantics against the upstream data.
        String resultSql = "insert into pv_uv_flink " +
                "select " +
                "substr(ts,1,10) as day_str" +
                ",sum(if(behavior='pv',1,0)) as pv_cnt" +
                ",sum(if(behavior='uv',1,0)) as uv_cnt" +
                " from user_log group by substr(ts,1,10)";
        // executeSql(INSERT ...) submits the streaming job asynchronously and
        // returns a TableResult; print() shows the submitted job's ID.
        tableEnv.executeSql(resultSql).print();

        // BUG FIX: the original code called env.execute(...) here. Since the job is
        // submitted by executeSql above and no DataStream operators were added to
        // env, env.execute() would fail at runtime with
        // "No operators defined in streaming topology. Cannot execute."
    }
}
