package com.flinksql.test;


import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @author: Lin
 * @create: 2021-06-16 10:21
 * @description: Flink Table API demo: reads CSV text data through the legacy
 *               (Flink 1.10 style) connect()/descriptor API, counts rows per
 *               sensor id, and prints the result as a retract stream.
 **/
public class FlinkTableAPI_Test3 {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming environment (parallelism 1 for ordered console
        //    output) and the bridging table environment on top of it.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Register a temporary (dynamic) table backed by a comma-delimited file.
        tableEnv.connect(new FileSystem().path("input/sensor.txt"))
                .withFormat(new Csv().fieldDelimiter(',').lineDelimiter("\n"))
                .withSchema(new Schema()
                        .field("id", DataTypes.STRING())
                        .field("ts", DataTypes.BIGINT())
                        .field("vc", DataTypes.INT()))
                .createTemporaryTable("sensor");

        // 3. Query the registered table: count rows per sensor id.
        Table sensorTable = tableEnv.from("sensor");
        Table countsPerId = sensorTable
                .groupBy($("id"))
                .aggregate($("id").count().as("id_count"))
                .select($("id"), $("id_count"));

        // 4. The grouped aggregate produces updates, so convert the dynamic table
        //    to a retract stream: the Boolean flag is true for an insert/add and
        //    false for a retraction of a previously emitted row.
        DataStream<Tuple2<Boolean, Row>> retractStream =
                tableEnv.toRetractStream(countsPerId, Row.class);
        retractStream.print();

        env.execute();
    }
}
