package com.atguigu.day10;

import com.atguigu.bean.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class FlinkSQL08_DataStreamToTable_SQL_Agg {

    public static void main(String[] args) throws Exception {

        // 1. Set up the streaming execution environment and its table environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Read lines from the socket and map each CSV line to a WaterSensor bean.
        SingleOutputStreamOperator<WaterSensor> sensorStream = env
                .socketTextStream("hadoop102", 9999)
                .map(value -> {
                    String[] parts = value.split(",");
                    String id = parts[0];
                    long ts = Long.parseLong(parts[1]);
                    double vc = Double.parseDouble(parts[2]);
                    return new WaterSensor(id, ts, vc);
                });

        // 3. Register the stream as a temporary view named "sensor".
        tableEnv.createTemporaryView("sensor", sensorStream);

        // 4. Run the aggregation query. executeSql on a SELECT submits the job itself,
        //    and print() blocks while emitting the continuously updated result.
//        Table resultTable = tableEnv.sqlQuery("select id,count(id) from sensor group by id");
        TableResult tableResult = tableEnv.executeSql("select id,count(id) cnt from sensor group by id");
        tableResult.print();

        // 5. Alternative: convert the table to a retract stream and print it.
//        tableEnv.toRetractStream(resultTable, Row.class)
//                .print();

        // 6. Not needed here — executeSql already triggered execution.
//        env.execute();

    }

}
