package com.atguigu.flink.sql;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Created by Smexy on 2023/3/4
 *
 *    tableEnv.sqlQuery("select sql");   // run a query, returns a Table
 *    tableEnv.executeSql("insert sql"); // execute a statement (e.g. INSERT)
 *
 *    Table API: operates on a Table object.
 *    SQL:       operates on a registered table name.
 */
public class Demo1_Read
{
    public static void main(String[] args) {

        // Streaming environment plus its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Read raw lines from the socket and parse each one into a WaterSensor POJO.
        SingleOutputStreamOperator<WaterSensor> sensorStream =
                env.socketTextStream("hadoop103", 8888)
                   .map(new WaterSensorMapFunction());

        // Convert the stream into a dynamic table.
        Table sensorTable = tableEnv.fromDataStream(sensorStream);

        // Inlining the Table object into the SQL text is supported: Flink
        // registers it under a generated name when the string is built.
        // (Alternative: tableEnv.createTemporaryView("t1", sensorTable) and
        // query "t1" by name.)
        String sql = "select * from " + sensorTable + " where id = 's1' ";

        // execute().print() submits its own job; no env.execute() needed here.
        tableEnv.sqlQuery(sql)
                .execute()
                .print();
    }
}
