package com.ygx.flink.practice;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Flink SQL streaming job: reads user-behavior events from the Kafka topic
 * {@code user_behavior}, aggregates hourly page views (PV) and unique
 * visitors (UV), and upserts the result into a MySQL table via the JDBC
 * connector.
 *
 * @author YangGuoXiang
 * @version 1.0
 * @date 2022/2/22 14:25
 */
public class Practice01 {

    public static void main(String[] args) throws Exception {

        // Prepare the environment (Blink planner, streaming mode).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, bsSettings);

        // Create the input table backed by Kafka. Parse errors are skipped
        // ('json.ignore-parse-errors'), so malformed records are dropped silently.
        tableEnv.executeSql(
                "CREATE TABLE user_log (" +
                "    user_id VARCHAR," +
                "    item_id VARCHAR," +
                "    category_id VARCHAR," +
                "    behavior VARCHAR," +
                "    ts TIMESTAMP_LTZ(3)" +
                ") WITH (" +
                "    'connector' = 'kafka'," +
                "    'topic' = 'user_behavior'," +
                "    'scan.startup.mode' = 'latest-offset'," +
                "    'properties.bootstrap.servers' = 'cdh04:9092,cdh05:9092,cdh06:9092'," +
                "    'properties.group.id' = 'pvuv_sql'," +
                "    'format' = 'json'," +
                "    'json.ignore-parse-errors' = 'true'," +
                "    'json.timestamp-format.standard' = 'ISO-8601'" +
                ")");

        // Register the output table. The PRIMARY KEY makes the JDBC sink run in
        // upsert mode, so each hourly bucket row is updated in place.
        // NOTE(review): credentials are hard-coded; move them to configuration
        // (args / env vars) before this leaves a test environment.
        tableEnv.executeSql("" +
                "CREATE TABLE pvuv_sink (" +
                "    dt VARCHAR," +
                "    pv BIGINT," +
                "    uv BIGINT," +
                "    PRIMARY KEY (dt) NOT ENFORCED" +
                ") WITH (" +
                "    'connector' = 'jdbc'," +
                "    'url' = 'jdbc:mysql://192.168.5.9:3306/flink-test?useSSL=false&serverTimezone=UTC'," +
                "    'username' = 'root'," +
                "    'password' = '123456'," +
                "    'table-name' = 'pvuv_sink_sql'" +
                ")");

        // executeSql() on an INSERT statement submits the job immediately and
        // returns a TableResult handle. The previous env.execute() call was a
        // bug: no DataStream operators were ever registered on `env`, so it
        // threw "No operators defined in streaming topology" at runtime.
        TableResult insertResult = tableEnv.executeSql("" +
                "INSERT INTO pvuv_sink" +
                " SELECT" +
                "  DATE_FORMAT(ts, 'yyyy-MM-dd HH:00') dt," +
                "  COUNT(*) AS pv," +
                "  COUNT(DISTINCT user_id) AS uv" +
                " FROM user_log" +
                " GROUP BY DATE_FORMAT(ts, 'yyyy-MM-dd HH:00')");

        // Block until the (unbounded) job terminates; without this a local
        // mini-cluster would shut down as soon as main() returns.
        insertResult.await();

    }

}
