package cn.jly.flink.table_sql;

import cn.jly.flink.utils.FlinkUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Pure Table and SQL API demo that reads sensor data from the local filesystem.
 * Note: since Flink 1.11 the Blink planner is the default; versions up to 1.10 used the legacy planner.
 *
 * @PackageName cn.jly.flink.table_sql
 * @ClassName TableAndSqlDemo02_fileSystem
 * @Description Pure Table and SQL API usage
 * @Author 姬岚洋
 * @Date 2021/2/8 11:02 AM
 */
public class TableAndSqlDemo02_fileSystem {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming execution environment (parallelism 1 keeps console output ordered).
        final StreamExecutionEnvironment env = FlinkUtils.getStreamExecutionEnv();
        env.setParallelism(1);

        // Create a table environment backed by the Blink planner in streaming mode.
        final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(
                env,
                EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
        );

        // Batch-mode alternative:
        // BatchTableEnvironment.create(FlinkUtils.getBatchExecutionEnv());

        // 2. Table creation: connect to an external system and read its data.
        // 2.1 Legacy connector-descriptor API for reading a file (kept for reference):
//        tableEnv
//                .connect(new FileSystem().path("file:///d:/SensorReading.txt"))
//                .withFormat(new Csv().fieldDelimiter(','))
//                .withSchema(
//                        new Schema()
//                                .field("id", DataTypes.STRING())
//                                .field("temp", DataTypes.DOUBLE())
//                                .field("timestamp", DataTypes.BIGINT())
//                )
//                .createTemporaryTable("sensorTable");

        // Build the DDL and register the table via SQL.
        // `timestamp` must be back-quoted because it is a reserved SQL keyword.
        final String createSql =
                "create table if not exists sensorTable("
                        + "       id STRING,"
                        + "       temp DOUBLE,"
                        + "       `timestamp` BIGINT"
                        + ") with ("
                        + "       'connector' = 'filesystem',"
                        + "       'path' = 'file:///d:/SensorReading.txt',"
                        + "       'format' = 'csv',"
                        + "       'csv.field-delimiter'=','"
                        + ")";
        tableEnv.executeSql(createSql);

        // Simple output: print the schema and the raw rows.
        final Table sourceTable = tableEnv.from("sensorTable");
        sourceTable.printSchema();
        final DataStream<Row> rawRows = tableEnv.toAppendStream(sourceTable, Row.class);
        rawRows.print("schema");

        // Table API: select + filter on a single sensor id.
        final Table filteredTable = sourceTable
                .select($("id"), $("temp"), $("timestamp"))
                .filter($("id").isEqual("1001"));
        // An append-only query — rows can only be added, never retracted.
        tableEnv.toAppendStream(filteredTable, Row.class)
                .print("table");

        // SQL: the equivalent query expressed as a SQL string.
        final String filterSql = "select id, temp, `timestamp` from sensorTable where id = '1001'";
        final Table sqlResult = tableEnv.sqlQuery(filterSql);
        // Also append-only.
        tableEnv.toAppendStream(sqlResult, Row.class)
                .print("sql");

        // Aggregation: count(*) updates previously emitted results,
        // so a retract stream is required instead of an append stream.
        final String countSql = "select count(*) from sensorTable";
        final Table countTable = tableEnv.sqlQuery(countSql);
        tableEnv.toRetractStream(countTable, Row.class)
                .print("count");

        // SQL group-by with a having clause — likewise a retract stream.
        final String groupBySql = "select id, count(id) from sensorTable group by id having count(id) > 2";
        final Table groupedTable = tableEnv.sqlQuery(groupBySql);
        tableEnv.toRetractStream(groupedTable, Row.class)
                .print("group by");

        // Launch the job.
        env.execute("TableAndSqlDemo02");
    }
}
