package com.deepglint.tableapi;

import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.BatchTableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

/**
 * @author mj
 * @version 1.0
 * @date 2021-11-25 0:36
 */
public class TableTest_CommonAPI {

    /**
     * Demonstrates basic Flink Table API / SQL usage: creating a table
     * environment, registering a CSV-backed file source via the descriptor
     * API, running equivalent Table API and SQL queries, and printing the
     * results as append/retract streams.
     *
     * @param args optional; {@code args[0]} may override the input file path
     *             (defaults to the original hard-coded location)
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output in a single, readable stream.
        env.setParallelism(1);

        // 1. Default table environment: streaming mode on the current planner.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

//        // 1.1 Streaming with the old planner
//        EnvironmentSettings oldSettings = EnvironmentSettings.newInstance().useOldPlanner().inStreamingMode().build();
//        StreamTableEnvironment oldTableEnv = StreamTableEnvironment.create(env, oldSettings);
//
//        // 1.2 Batch with the old planner
//        ExecutionEnvironment batchEnv = ExecutionEnvironment.getExecutionEnvironment();
//        BatchTableEnvironment oldBatchTableEnv = BatchTableEnvironment.create(batchEnv);
//
//        // 1.3 Streaming with the Blink planner
//        EnvironmentSettings blinkSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
//        StreamTableEnvironment blinkTableEnv = StreamTableEnvironment.create(env, blinkSettings);
//
//        // 1.4 Batch with the Blink planner
//        EnvironmentSettings blinkBatchSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build();
//        TableEnvironment blinkBatchTableEnv = TableEnvironment.create(blinkBatchSettings);

        // 2. Table creation: connect to an external system and read data.
        // 2.1 Read from a file. Allow the path to be passed as the first
        // program argument so the job is not tied to one machine; fall back
        // to the original hard-coded location for backward compatibility.
        String path = args.length > 0
                ? args[0]
                : "C:\\Users\\马军\\Desktop\\Idea-workspace\\flink\\src\\main\\resources\\source.txt";
        tableEnv.connect(new FileSystem().path(path))
                .withFormat(new Csv())
                .withSchema(new Schema()
                        .field("id", DataTypes.STRING())
                        .field("name", DataTypes.STRING())
                        .field("timestamp", DataTypes.BIGINT())
                        .field("temp", DataTypes.DOUBLE())
                )
                .createTemporaryTable("inputTable");

        Table inputTable = tableEnv.from("inputTable");
//        inputTable.printSchema();
//        tableEnv.toAppendStream(inputTable, Row.class).print();

        // 3. Simple transformation: project and filter with Table API expressions.
        Table resultTable = inputTable.select("id,temp")
                .filter("id === 'sensor1'");

        // Aggregation: row count and average temperature per sensor id.
        Table aggTable = inputTable.groupBy("id")
                .select("id,id.count as idCount,temp.avg as avgTemp");

        // 3.2 The same queries expressed in SQL.
        Table sqlQuery = tableEnv.sqlQuery("select id, temp from inputTable where id = 'sensor1'");
        Table aggQuery = tableEnv.sqlQuery("select id,count(id) as cnt,avg(temp) as tempAvg from inputTable group by id");

        // Output. Grouped aggregations may update previously emitted rows,
        // so they must go through toRetractStream; the plain projection is
        // append-only and can use toAppendStream.
        tableEnv.toAppendStream(resultTable, Row.class).print("res");
        tableEnv.toRetractStream(aggTable, Row.class).print("agg");
        tableEnv.toRetractStream(sqlQuery, Row.class).print("sqlQuery");
        tableEnv.toRetractStream(aggQuery, Row.class).print("aggQuery");

        env.execute();
    }
}
