package com.shujia.flink.sql;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

/**
 * Flink SQL demo: reads student records from a Kafka topic, counts rows per
 * class with a continuous (unbounded) GROUP BY query, and prints the result.
 */
public class Demo2FlinkSQL {
    public static void main(String[] args) {

        // Environment settings: run in streaming mode.
        // Flip to .inBatchMode() to execute the same pipeline as a batch job.
        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inStreamingMode()
                //.inBatchMode()
                .build();

        // Table environment — the entry point for Flink SQL.
        // executeSql accepts DDL, DQL and DML statements.
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // 1. Source table: CSV records consumed from the Kafka topic 'student',
        //    starting from the latest offset.
        String sourceDdl =
                "CREATE TABLE student (\n" +
                "  id STRING,\n" +
                "  name STRING,\n" +
                "  age BIGINT,\n" +
                "  gender STRING,\n" +
                "  clazz STRING\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',-- 数据源\n" +
                "  'topic' = 'student', -- topic\n" +
                "  'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',  -- broker列表\n" +
                "  'properties.group.id' = 'testGroup',-- 消费者组\n" +
                "  'scan.startup.mode' = 'latest-offset', -- 读取数据的位置\n" +
                "  'format' = 'csv' -- 数据格式\n" +
                ")";
        tableEnv.executeSql(sourceDdl);

        // 2. Sink table: the 'print' connector writes query results to stdout.
        String sinkDdl =
                "CREATE TABLE print_table (\n" +
                "  clazz STRING,\n" +
                "  num BIGINT\n" +
                ") WITH (\n" +
                " 'connector' = 'print'\n" +
                ")\n";
        tableEnv.executeSql(sinkDdl);

        // 3. Continuous query over the dynamic source table; each update to
        //    the per-class count is emitted into the sink table.
        String insertDml =
                "insert into print_table\n" +
                "select \n" +
                "clazz,\n" +
                "count(1) as num\n" +
                "from student\n" +
                "group by clazz";
        tableEnv.executeSql(insertDml);


    }
}
