package com.zyx.flinkdemo.sql.source;


import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @author zyx
 * @since 2021/5/23 08:12
 * desc: Flink SQL example reading from a filesystem (file) source.
 */
public class SourceFileDemo {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming execution environment and its table environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output in a single, deterministic stream.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Register a filesystem-backed table via SQL DDL. This replaces the
        //    deprecated tableEnv.connect(...) descriptor API.
        tableEnv.executeSql("create table sensor (id string, ts bigint, vc int) with(" +
                "  'connector' = 'filesystem'," +
                "  'path' = 'input/flink/sensor.txt'," +
                "  'format' = 'csv'" +
                ")");

        // 3. Obtain a Table handle for the registered connector table.
        Table sensor = tableEnv.from("sensor");

        // 4. Count rows per sensor id using the Table API.
        Table resultTable = sensor.groupBy($("id"))
                .select($("id"), $("id").count().as("ct"));

        // 5. Convert the aggregate result to a retract stream: counts update over
        //    time, so each record carries a Boolean add(true)/retract(false) flag.
        DataStream<Tuple2<Boolean, Row>> tuple2DataStream = tableEnv
                .toRetractStream(resultTable, Row.class);

        // 6. Print the retract stream to stdout.
        tuple2DataStream.print();

        // 7. Trigger job execution.
        env.execute();
    }
}
