package api.tableapi;

import api.beans.SensorReading;
import api.beans.SensorReading2;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.util.Collector;

/**
 * Reads sensor data from a CSV file, runs a SQL aggregation (row count and
 * average temperature per sensor id), prints the intermediate streams, and
 * writes the aggregated rows to an output directory via a StreamingFileSink.
 */
public class TableTest4_file2 {

    /**
     * Entry point for the Flink job.
     *
     * @param args optional overrides: {@code args[0]} = input file path,
     *             {@code args[1]} = output directory path; the original
     *             hard-coded locations are used when absent
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the environment. Parallelism 1 keeps the printed output
        // ordered and the sink writing a single file per bucket for the demo.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Resolve source/sink paths; allow overriding via program arguments
        // instead of relying only on the hard-coded developer-machine paths.
        String fromPath = args.length > 0
                ? args[0]
                : "D:\\IdeaProjects\\springboot-flink-1\\flinkTutorial\\src\\main\\resources\\sensor.txt";
        String toPath = args.length > 1
                ? args[1]
                : "D:\\IdeaProjects\\springboot-flink-1\\flinkTutorial\\src\\main\\resources\\outData";

        // Register the input file as a table with an explicit schema
        // (legacy descriptor-based connector API).
        Schema schema1 = new Schema()
                .field("id", DataTypes.STRING())
                .field("timestamp", DataTypes.BIGINT())
                .field("temp", DataTypes.DOUBLE());
        tableEnv.connect(new FileSystem().path(fromPath))
                .withFormat(new Csv())
                .withSchema(schema1)
                .createTemporaryTable("inputTable");

        // 3. Aggregate: per-sensor row count and average temperature.
        // A GROUP BY query emits updates, so a retract stream is required:
        // f0 == true marks an add message, f0 == false marks a retraction.
        Table sqlAggTable = tableEnv.sqlQuery(
                "select id, count(id) as cnt, avg(temp) as avgTemp from inputTable group by id");
        DataStream<Tuple2<Boolean, SensorReading2>> sqlAggStream =
                tableEnv.toRetractStream(sqlAggTable, SensorReading2.class);
        sqlAggStream.print("sqlAggStream:");

        // Keep only the add messages. NOTE(review): retractions are dropped,
        // but earlier (now-stale) add messages were already emitted, so the
        // sink receives every intermediate aggregate, not only the final
        // value per id — acceptable for a demo, confirm for production use.
        DataStream<SensorReading2> outputStream = sqlAggStream.flatMap(
                new FlatMapFunction<Tuple2<Boolean, SensorReading2>, SensorReading2>() {
                    @Override
                    public void flatMap(Tuple2<Boolean, SensorReading2> value,
                                        Collector<SensorReading2> out) throws Exception {
                        if (value.f0) {
                            out.collect(value.f1);
                        }
                    }
                });
        outputStream.print("outputStream:");

        // 4. Write the surviving rows to the output directory, one line per
        // record (SimpleStringEncoder uses SensorReading2#toString).
        outputStream.addSink(
                StreamingFileSink.forRowFormat(
                        new Path(toPath),
                        new SimpleStringEncoder<SensorReading2>()
                ).build()
        );

        env.execute();
    }
}
