package com.xl.flinkdemo.TableAPI;

import com.xl.flinkdemo.entity.SensorReading;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.GroupedTable;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.java.BatchTableEnvironment;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

/**
 * @ClassName ExampleInputOutput
 * @Description Flink Table API / SQL demo: reads sensor readings from a text file,
 *              registers file-based input/output tables, and runs simple selection
 *              and aggregation queries via both the Table API and SQL.
 * @Author hxl
 * @Date 2021/5/21 11:37
 * @Version 1.0
 **/
public class ExampleInputOutput {
  /**
   * Entry point of the Table API demo: converts a text stream of sensor readings
   * into a {@code Table}, registers file-based catalog tables, runs projection and
   * aggregation queries via both the Table API and SQL, then prints the results
   * and writes the append-only query to a file sink.
   *
   * @param args command-line arguments (unused)
   * @throws Exception if job construction or execution fails
   */
  public static void main(String[] args) throws Exception {

    StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
    // Set parallelism to 1 BEFORE building the pipeline so records keep file order.
    // (Originally this was called after readTextFile, which does not apply the
    // setting to the already-created source operator.)
    environment.setParallelism(1);

    DataStreamSource<String> dataStreamSource =
        environment.readTextFile("D:\\studyspace\\flinkdemo\\src\\main\\resources\\sensors\\input.txt");

    // Parse each CSV line into a SensorReading POJO.
    // Long.parseLong / Double.parseDouble replace the boxed constructors
    // new Long(...) / new Double(...), which are deprecated since Java 9.
    DataStream<SensorReading> results = dataStreamSource.map(line -> {
      String[] fields = line.split(",");
      return new SensorReading(fields[0], Long.parseLong(fields[1]), Double.parseDouble(fields[2]));
    });

    // ---- 1. Table environments ----

    // 1.1 Old-planner streaming environment (the one actually used below).
    EnvironmentSettings oldStreamSettings = EnvironmentSettings.newInstance()
        .inStreamingMode()
        .useOldPlanner()
        .build();
    StreamTableEnvironment oldStreamTableEnv = StreamTableEnvironment.create(environment, oldStreamSettings);

    // 1.2 Old-planner batch environment (created for illustration only; unused below).
    ExecutionEnvironment batchEnv = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment oldBatchTableEnv = BatchTableEnvironment.create(batchEnv);

    // 1.3 Blink-planner streaming environment (alternative, kept commented for reference)
   /* EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
        .useBlinkPlanner()
        .inStreamingMode()
        .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(environment, blinkStreamSettings);

    // 1.4 Blink-planner batch environment (alternative, kept commented for reference)
    EnvironmentSettings blinkBatchSettings = EnvironmentSettings.newInstance()
        .useBlinkPlanner()
        .inBatchMode()
        .build();
    TableEnvironment blinkBatchTableEnv = TableEnvironment.create(blinkBatchSettings);*/

    // ---- 2. Table creation: connect to external files and register catalog tables ----
    // NOTE(review): the column names look swapped relative to the CSV data — "temp"
    // (BIGINT) appears to hold the timestamp and "sensors" (DOUBLE) the temperature.
    // Confirm against input.txt and SensorReading before renaming, since the SQL
    // queries below reference these names.
    String filtPath = "D:\\studyspace\\flinkdemo\\src\\main\\resources\\sensors\\input.txt";
    oldStreamTableEnv.connect(new FileSystem().path(filtPath))
                     .withFormat(new Csv())
                     .withSchema(new Schema().field("id", DataTypes.STRING())
                                             .field("temp", DataTypes.BIGINT())
                                             .field("sensors", DataTypes.DOUBLE()))
                     .createTemporaryTable("inputTable");

    String outFilePath = "D:\\studyspace\\flinkdemo\\src\\main\\resources\\sensors\\output.txt";
    oldStreamTableEnv.connect(new FileSystem().path(outFilePath))
        .withFormat(new Csv())
        .withSchema(new Schema().field("id", DataTypes.STRING())
            .field("temp", DataTypes.BIGINT()))
        .createTemporaryTable("outPutTable");

    // Table built directly from the DataStream; its columns come from SensorReading's
    // fields (presumably including "temp" — verify, the selects below depend on it).
    Table inputTable = oldStreamTableEnv.fromDataStream(results);
    /*inputTable.printSchema();
    oldStreamTableEnv.toAppendStream(inputTable,Row.class).print("inputTable");*/

    // ---- 3. Queries ----

    // 3.1 Table API: simple projection + filter.
    Table resultTable = inputTable.select("id,temp").filter("id = 'sensor_3'");

    // Aggregation: per-sensor row count and average of the "temp" column.
    Table aggTable = inputTable.groupBy("id").select("id,id.count as count,temp.avg as avgTemp");

    // 3.2 SQL equivalents. The first query's result is intentionally discarded —
    // it only demonstrates the sqlQuery API against the registered "inputTable".
    oldStreamTableEnv.sqlQuery("select id,temp from inputTable where id = 'sensor_3'");

    Table sqlAggTable = oldStreamTableEnv.sqlQuery("select id,count(id) as idCount,avg(temp) as avgTemp from inputTable group by id");

    // ---- 4. Output ----
    // The non-aggregating query can be printed as an append stream; aggregations
    // produce updates and therefore require a retract stream.
    oldStreamTableEnv.toAppendStream(resultTable, Row.class).print("result");
    oldStreamTableEnv.toRetractStream(aggTable, Row.class).print("aggTable");
    oldStreamTableEnv.toRetractStream(sqlAggTable, Row.class).print("sqlAggTable");

    // Write to the file sink; file sinks only support append-only results,
    // so aggregated (updating) tables cannot be inserted here.
    resultTable.insertInto("outPutTable");

    environment.execute();

  }
}
