package com.atguigu.flinksql.daytest.sql;

import com.atguigu.datastream.bean.WaterSensor;
import com.sun.org.apache.bcel.internal.generic.NEW;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.Properties;

/**
 * ClassName: Test02
 * Package: com.atguigu.flinksql.daytest.sql
 * Description:
 *           1.1 Read Kafka data using the DataStream API
 * 	         1.2 Convert the stream into a dynamic table
 * 	         1.3 Register a MySQL table as a lookup (dimension) table
 * 	         1.4 Join the two tables
 * @Author ChenJun
 * @Create 2023/4/22 9:15
 * @Version 1.0
 */
public class Test02 {
    public static void main(String[] args) {

        // 1. Stream execution environment + Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Consume raw records from Kafka with the DataStream API.
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop102:9092");
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "1");
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        DataStreamSource<String> streamSource =
                env.addSource(new FlinkKafkaConsumer<>("test1109", new SimpleStringSchema(), properties));

        // FIX: the original registered the raw String stream directly, which yields a
        // single column `f0` — the join query below would fail because `t1.id`, `t1.vc`
        // and the processing-time attribute `t1.pt` never existed. Parse the records into
        // WaterSensor beans so the columns exist, and declare a PROCTIME() column, which
        // the lookup join (FOR SYSTEM_TIME AS OF t1.pt) requires.
        // NOTE(review): assumes records arrive as CSV "id,ts,vc" and WaterSensor has a
        // (String, Long, Integer) constructor — confirm against the producer and the bean.
        SingleOutputStreamOperator<WaterSensor> sensorStream = streamSource.map(line -> {
            String[] fields = line.split(",");
            return new WaterSensor(fields[0], Long.parseLong(fields[1]), Integer.parseInt(fields[2]));
        });

        // 3. Convert the stream into a dynamic table with a processing-time attribute.
        Table table = tableEnv.fromDataStream(
                sensorStream,
                Schema.newBuilder()
                        .columnByExpression("pt", "PROCTIME()") // time attribute for the lookup join
                        .build());
        tableEnv.createTemporaryView("t1", table);

        // 4. Register the MySQL table as a JDBC lookup (dimension) table.
        tableEnv.executeSql("" +
                "CREATE TABLE user_info(\n" +
                "    id STRING,\n" +
                "    name STRING, \n" +
                "    age INT,\n" +
                "    primary key(id) not enforced\n" +
                ") WITH (\n" +
                "    'connector'='jdbc',\n" +
                "    'url' = 'jdbc:mysql://hadoop102:3306/test?useUnicode=true&characterEncoding=UTF-8',\n" +
                "    'username' = 'root',\n" +
                "    'password' = '000000',\n" +
                "    'connection.max-retry-timeout' = '60s',\n" +
                "    'table-name' = 'user_info',\n" +
                "    'sink.buffer-flush.max-rows' = '500',\n" +
                "    'sink.buffer-flush.interval' = '1s',\n" +
                "    'sink.max-retries' = '3',\n" +
                "    'sink.parallelism' = '1'\n" +
                ")");

        // 5. Lookup join.
        // FIX: the original discarded the Table returned by sqlQuery() and never executed
        // anything (no print / no env.execute()), so the job did nothing at all.
        // execute().print() submits the query and streams its result to stdout.
        tableEnv.sqlQuery("" +
                        "select\n" +
                        "    t1.id,\n" +
                        "    t1.vc,\n" +
                        "    t2.name\n" +
                        "from t1 \n" +
                        "join user_info FOR SYSTEM_TIME AS OF t1.pt AS t2\n" +
                        "on t1.id=t2.id")
                .execute()
                .print();
    }
}
