package com.atguigu.sql.api;
/*
 * Stream <-> Table conversion demo:
 * converts a DataStream to a Table, runs append and update SQL queries,
 * and converts the results back to streams.
 * */

import com.atguigu.pojo.Event;
import com.atguigu.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class Flink02_StreamTable {
    public static void main(String[] args) {
        // 1.1 Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps console output ordered for this demo.
        env.setParallelism(1);
        // 1.2 Create a stream table environment on top of the streaming environment.
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(env);

        // 1.3 Read lines "id,vc,ts" from the socket and map each into a WaterSensor.
        SingleOutputStreamOperator<WaterSensor> ds = env.socketTextStream("hadoop102", 8888)
                .map(
                        line -> {
                            String[] fields = line.split(",");
                            // parseInt/parseLong avoid explicit boxing; autoboxing
                            // covers a boxed-parameter WaterSensor constructor.
                            return new WaterSensor(
                                    fields[0].trim(),
                                    Integer.parseInt(fields[1].trim()),
                                    Long.parseLong(fields[2].trim()));
                        }
                );

        // 2. Convert the DataStream into a Table.
        Table table = streamTableEnv.fromDataStream(ds);
        // 2.1 Register the Table in the environment as a temporary view named "t1".
        streamTableEnv.createTemporaryView("t1", table);

        // 2.2 Append-only query: a plain filter never retracts rows.
        Table resultTable = streamTableEnv.sqlQuery("select id ,vc , ts from t1 where vc >=100");

        // 2.3 Updating query: the aggregation emits retract/update rows per key.
        Table table1 = streamTableEnv.sqlQuery("select id ,sum(vc) from t1 group by id");

        // Table -> stream for the append-only result: toDataStream suffices.
        DataStream<Row> resultDs = streamTableEnv.toDataStream(resultTable);

        // Table -> stream for the updating result: toChangelogStream is required,
        // since toDataStream cannot represent retractions.
        DataStream<Row> updateDs = streamTableEnv.toChangelogStream(table1);

        resultDs.print();
        updateDs.print();

        try {
            env.execute();
        } catch (Exception e) {
            // Preserve the original cause when rethrowing.
            throw new RuntimeException(e);
        }
    }
}
