package com.jianying.day11;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * ClassName: FlinkSql01_Env
 * Package: com.jianying.day11
 * Description:
 *
 * @Author: tubage
 * @Create: 2024/4/13 11:38
 * @Version: 1.0
 */
public class FlinkSql01_Env {
    public static void main(String[] args) throws Exception {
        // Lifecycle demonstrated here:
        // stream -> dynamic table -> continuous query -> dynamic table -> direct output / back to stream

        // Streaming environment; parallelism 1 keeps the console output ordered and readable.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Table execution environment bridged onto the streaming environment.
        // Equivalent long form:
        // EnvironmentSettings.newInstance()
        //         .inStreamingMode()
        //         .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // ===== Stream -> dynamic table =====

        // Sample stream used only by the commented-out examples below. Kept commented out
        // so the active pipeline does not declare an unused source.
        // DataStreamSource<Integer> sourceDS = env.fromElements(1, 2, 3, 4);

        // Option 1: Table API — convert the stream to a Table object.
        // Table testTable = tableEnv.fromDataStream(sourceDS,$("num"));
        // tableEnv.executeSql("select * from" + testTable).print();// string concatenation calls Table.toString(), which registers the table in the environment
        // tableEnv.createTemporaryView("test",testTable);
        // tableEnv.executeSql("select * from test").print();


        // Option 2: register the stream directly as a temporary view.
        // tableEnv.createTemporaryView("t_test", sourceDS, $("num"));
        // TableResult result = tableEnv.executeSql("select * from t_test");
        // result.print();


        // Option 3: connector DDL (datagen generates rows; no DataStream involved).
        // TableResult tableResult = tableEnv.executeSql("CREATE TABLE source ( \n" +
        //         "    id INT, \n" +
        //         "    ts BIGINT, \n" +
        //         "    vc INT \n" +
        //         ") WITH ( \n" +
        //         "    'connector' = 'datagen', \n" +
        //         "    'rows-per-second'='1', \n" +
        //         "    'fields.id.kind'='random', \n" +
        //         "    'fields.id.min'='1', \n" +
        //         "    'fields.id.max'='10', \n" +
        //         "    'fields.ts.kind'='sequence', \n" +
        //         "    'fields.ts.start'='1', \n" +
        //         "    'fields.ts.end'='1000000', \n" +
        //         "    'fields.vc.kind'='random', \n" +
        //         "    'fields.vc.min'='1', \n" +
        //         "    'fields.vc.max'='100'\n" +
        //         ");");
        //
        // TableResult result = tableEnv.executeSql("select * from source");
        // result.print();

        // ===== Continuous query =====
        // Stream -> dynamic table first:
        // tableEnv.createTemporaryView("t_test", sourceDS, $("num"));
        //
        // // SQL style: executeSql (executes immediately) vs sqlQuery (returns a Table)
        // TableResult result = tableEnv.executeSql("select * from t_test");
        // result.print();
        //
        // Table table = tableEnv.sqlQuery("select * from t_test");
        // tableEnv.createTemporaryView("test", table);
        // tableEnv.executeSql("select * from test").print();
        //
        // table.execute().print();

        // Table API style (rarely used in practice):
        // Table table1 = tableEnv.sqlQuery("select * from t_test");
        // table1
        //         .select($("num"))
        //         .execute()
        //         .print();

        // ===== Direct output: insert the query result into a sink table =====
        // tableEnv.executeSql("CREATE TABLE source ( \n" +
        //         "    id INT, \n" +
        //         "    ts BIGINT, \n" +
        //         "    vc INT \n" +
        //         ") WITH ( \n" +
        //         "    'connector' = 'datagen', \n" +
        //         "    'rows-per-second'='1', \n" +
        //         "    'fields.id.kind'='random', \n" +
        //         "    'fields.id.min'='1', \n" +
        //         "    'fields.id.max'='10', \n" +
        //         "    'fields.ts.kind'='sequence', \n" +
        //         "    'fields.ts.start'='1', \n" +
        //         "    'fields.ts.end'='1000000', \n" +
        //         "    'fields.vc.kind'='random', \n" +
        //         "    'fields.vc.min'='1', \n" +
        //         "    'fields.vc.max'='100'\n" +
        //         ");");
        //
        // tableEnv.executeSql("CREATE TABLE sink (\n" +
        //         "    id INT, \n" +
        //         "    ts BIGINT, \n" +
        //         "    vc INT\n" +
        //         ") WITH (\n" +
        //         "'connector' = 'print'\n" +
        //         ");");
        //
        // //tableEnv.executeSql("insert into sink select * from source");
        //
        // Table queryTable = tableEnv.sqlQuery("select * from source where id =1");
        // //tableEnv.createTemporaryView("query_table",queryTable);
        // //tableEnv.executeSql("insert into sink select * from query_table");
        //
        // queryTable.executeInsert("sink");


        // ===== Dynamic table -> stream (the active example) =====
        // Register a datagen-backed source table via DDL.
        // NOTE(review): the trailing ';' inside the DDL string is tolerated by recent Flink
        // versions but was rejected by some older parsers — confirm against the Flink
        // version on the classpath.
        tableEnv.executeSql("CREATE TABLE source ( \n" +
                "    id INT, \n" +
                "    ts BIGINT, \n" +
                "    vc INT \n" +
                ") WITH ( \n" +
                "    'connector' = 'datagen', \n" +
                "    'rows-per-second'='1', \n" +
                "    'fields.id.kind'='random', \n" +
                "    'fields.id.min'='1', \n" +
                "    'fields.id.max'='10', \n" +
                "    'fields.ts.kind'='sequence', \n" +
                "    'fields.ts.start'='1', \n" +
                "    'fields.ts.end'='1000000', \n" +
                "    'fields.vc.kind'='random', \n" +
                "    'fields.vc.min'='1', \n" +
                "    'fields.vc.max'='100'\n" +
                ");");

        // Append-only query -> insert-only stream.
        Table table = tableEnv.sqlQuery("select * from source");
        DataStream<Row> dataStream = tableEnv.toDataStream(table);
        dataStream.print();

        // Aggregating query produces updates/retractions -> must use a changelog stream.
        Table table1 = tableEnv.sqlQuery("select id, sum(vc) sumVc from source group by id");
        DataStream<Row> changelogStream = tableEnv.toChangelogStream(table1);
        changelogStream.print();


        // NOTE: TableResult.print() blocks until the stream ends, so with an unbounded
        // source it never returns.


        // Submit the job. Pure Table/SQL pipelines (executeSql / executeInsert) run on their
        // own, but the DataStream sinks above (print()) require env.execute().
        env.execute();

    }
}
