package com.atguigu.flink.chapter10.join;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * Demonstrates a streaming LEFT JOIN between two Kafka-backed tables
 * (t1: id/name, t2: id/age) and writes the continuously-updated join
 * result to an upsert-kafka sink keyed by id.
 */
public class JoinToKafka {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Optionally limit idle-state retention so unbounded join state
        // does not grow forever (disabled for this demo):
        //tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(20));
        //tEnv.getConfig().set("table.exec.state.ttl","20 second");

        // Left side of the join: (id, name) read as CSV from topic t1.
        // NOTE: fixed typo "sring" -> "string"; the original DDL would fail
        // at runtime with an unknown-type error.
        tEnv.executeSql("create table t1(" +
                "id string," +
                "name string" +
                ") with(" +
                "  'connector' = 'kafka', " +
                "  'topic' = 't1', " +
                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
                "  'properties.group.id' = 'atguigu', " +
                "  'scan.startup.mode' = 'latest-offset', " +
                "  'format' = 'csv' " +
                ")");

        // Right side of the join: (id, age) read as CSV from topic t2.
        // NOTE: fixed topic 't1' -> 't2'; the original pointed both tables
        // at the same topic, so t2's CSV parser would choke on t1's
        // (id,name) records and the join could never see matching rows.
        tEnv.executeSql("create table t2(" +
                "id string," +
                "age int" +
                ") with(" +
                "  'connector' = 'kafka', " +
                "  'topic' = 't2', " +
                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
                "  'properties.group.id' = 'atguigu', " +
                "  'scan.startup.mode' = 'latest-offset', " +
                "  'format' = 'csv' " +
                ")");

        // Left join: every t1 row is emitted immediately (age = NULL),
        // then retracted/updated when a matching t2 row arrives.
        Table result = tEnv.sqlQuery("select " +
                "t1.id," +
                "name," +
                "age" +
                " from t1 " +
                "left join t2 on t1.id = t2.id");

        // Sink: upsert-kafka requires a primary key; updates/retractions
        // from the left join are written as upserts keyed by id.
        tEnv.executeSql("create table t14(" +
                "id string," +
                "name string," +
                "age int," +
                "primary key(id) not enforced" +
                ") with(" +
                "  'connector' = 'upsert-kafka', " +
                "  'topic' = 't14', " +
                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
                "  'key.format' = 'json', " +
                "  'value.format' = 'json' " +
                ")");

        result.executeInsert("t14");
    }
}


//public class JoinToKafka {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//
//        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
//        //设置空闲状态时间
//        //tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(20));
//        //tEnv.getConfig().set("table.exec.state.ttl","20 second");
//
//        tEnv.executeSql("create table t1(" +
//                "id string," +
//                "name sring" +
//                ") with(" +
//                "  'connector' = 'kafka', " +
//                "  'topic' = 't1', " +
//                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
//                "  'properties.group.id' = 'atguigu', " +
//                "  'scan.startup.mode' = 'latest-offset', " +
//                "  'format' = 'csv' " +
//                ")");
//
//        tEnv.executeSql("create table t2(" +
//                "id string," +
//                "age int" +
//                ") with(" +
//                "  'connector' = 'kafka', " +
//                "  'topic' = 't1', " +
//                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
//                "  'properties.group.id' = 'atguigu', " +
//                "  'scan.startup.mode' = 'latest-offset', " +
//                "  'format' = 'csv' " +
//                ")");
//
//
//        //左连接
//        Table result = tEnv.sqlQuery("select " +
//                "t1.id," +
//                "name," +
//                "age " +
//                "from t1 " +
//                "left join t2 on t1.id = t2.id");
//
//        tEnv.executeSql("create table t14(" +
//                "id string," +
//                "name string," +
//                "age int" +
//                ") with(" +
//                "  'connector' = 'upsert-kafka', " +
//                "  'topic' = 't14', " +
//                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
//                "  'key.format' = 'json', " +
//                "  'value.format' = 'json' " +
//                ")");
//
//        result.executeInsert("t14");
//
//    }
//}