package cn.doitedu.sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author: 深似海
 * @Site: <a href="www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2024/3/2
 * @Desc: 学大数据，上多易教育
 * <p>
 **/
public class _03_UpsertKafkaConnector_Demo {

    /**
     * Demonstrates the Flink SQL {@code upsert-kafka} connector.
     *
     * <p>Pipeline: a MySQL table is mapped as a source via the JDBC connector; an
     * aggregate (average salary per gender) can be upserted into a Kafka topic keyed
     * by the table's primary key; the same upsert-kafka table is then queried back
     * and printed as a changelog stream.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Source table: maps MySQL table doit46.student via the JDBC connector.
        tenv.executeSql(
                "CREATE TABLE students_mysql( " +
                        "    id     int,      " +
                        "    name   string,   " +
                        "    gender string,   " +
                        "    age    int,      " +
                        "    salary float     " +
                        ") with (             " +
                        "    'connector' = 'jdbc',      " +
                        "    'url' = 'jdbc:mysql://doitedu:3306/doit46', " +
                        "    'table-name' = 'student',  " +
                        "    'username' = 'root',       " +
                        "    'password' = 'root'        " +
                        ")");


        // Upsert table: maps Kafka topic 'ss-3' via the upsert-kafka connector.
        // The primary key (gender) becomes the Kafka record key; with
        // 'value.fields-include' = 'EXCEPT_KEY' the key column is omitted from
        // the record value. (A previous comment here incorrectly described this
        // as a MySQL destination table — it is a Kafka table.)
        tenv.executeSql(
                "create table avg_salary_kafka(                     \n" +
                        "   gender string,                          \n" +
                        "   salary float,                           \n" +
                        "   primary key(gender) not enforced        \n" +
                        ") with (                                   \n" +
                        "  'connector' = 'upsert-kafka',                    \n" +
                        "  'topic' = 'ss-3',                                \n" +
                        "  'properties.bootstrap.servers' = 'doitedu:9092', \n" +
                        "  'properties.group.id' = 'doit44_g2',             \n" +
                        "  'key.format' = 'json',                           \n" +
                        "  'value.format' = 'json',                         \n" +
                        "  'value.fields-include' = 'EXCEPT_KEY'            \n" +
                        "  )"
        );


        // Step 1 (one-time population, intentionally disabled): aggregate the average
        // salary per gender from the JDBC source and upsert the changelog into the
        // Kafka topic. Uncomment and run once to (re)populate 'ss-3'.
//        tenv.executeSql(
//                "insert into avg_salary_kafka  " +
//                        "select gender,avg(salary) as salary " +
//                        "from students_mysql " +
//                        "group by gender ");



        // Step 2: read the previously upserted results back from Kafka through the
        // same upsert-kafka table and print them as a changelog stream.
        tenv.executeSql("select * from avg_salary_kafka").print();



    }
}
