package com.atguigu.fink.chapter01.tableapi;

import com.atguigu.fink.bean.WaterSensor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @Author lzc
 * @Date 2022/11/28 09:06
 */
public class Flink01_TableApi_BaseUse_3 {

    /**
     * Demonstrates a Table API continuous aggregation: groups a bounded stream of
     * {@code WaterSensor} readings by {@code id}, sums the {@code vc} field, and
     * prints the continuously updated result as a changelog stream.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Pin the local REST/web-UI port so repeated runs bind the same address.
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);
        DataStreamSource<WaterSensor> stream = env.fromElements(
            new WaterSensor("s1", 1L, 10),
            new WaterSensor("s1", 2L, 10),
            new WaterSensor("s2", 3L, 20),
            new WaterSensor("s1", 4L, 30),
            new WaterSensor("s1", 5L, 40),
            new WaterSensor("s1", 6L, 50)
        );

        // 1. Create the table execution environment.
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // 2. Convert the DataStream into a dynamic table.
        Table table = tEnv.fromDataStream(stream);
        // 3. Run a continuous query on the dynamic table; the result is again a
        //    dynamic table. Equivalent SQL:
        //      select id, sum(vc) as vc_sum from t group by id
        Table resultTable = table
            .groupBy($("id"))
            .aggregate($("vc").sum().as("vc_sum"))
            .select($("id"), $("vc_sum"));

        // The aggregation emits update changes, e.g. for s1:
        //   +I  s1, 10   (insert)
        //   -U  s1, 10   (retract old value)
        //   +U  s1, 20   (emit new value)
        // tEnv.toDataStream(resultTable) cannot consume update changes, so convert
        // to a changelog stream instead. (A retract stream via
        // tEnv.toRetractStream(resultTable, Row.class) would also work, optionally
        // filtering on the Boolean flag to keep only the latest values.)
        // NOTE: without this sink the pipeline has no operators and
        // env.execute() fails with "No operators defined in streaming topology".
        tEnv.toChangelogStream(resultTable).print();

        env.execute();
    }
}
