package com.atguigu.flink.tableapi;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Created by Smexy on 2023/2/5
 *
 */
public class Demo3_Row
{
    /**
     * Demo: converting a Table query result back to a DataStream using the
     * generic {@link Row} type, so the output columns do not have to match
     * any specific POJO's fields.
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        // Parse raw socket lines into WaterSensor POJOs.
        SingleOutputStreamOperator<WaterSensor> ds = env
                .socketTextStream("hadoop103", 8888)
                .map(new WaterSensorMapFunction());
        Table table = tableEnvironment.fromDataStream(ds);

        // Print the table schema; it is derived from the fields of the
        // POJO type carried by the stream.
        table.printSchema();

        Table result = table.select($("id"), $("vc"))
                            .where($("id").isEqual("s1"));

        /*
            Convert the table back to a stream so Stream API operators can be applied.

            Converting to DataStream<WaterSensor> would fail here:

            Exception in thread "main" org.apache.flink.table.api.ValidationException:
                    Column types of query result and sink for registered table
                    'default_catalog.default_database.Unregistered_DataStream_Sink_1' do not match.

            i.e. the schema of the query result and the type of the target stream
            do not match:

            Cause: Different number of columns.

            Query schema: [id: STRING, vc: INT]
            Sink schema:  [id: STRING, ts: BIGINT, vc: INT]

            The POJO's fields would have to correspond exactly to the table's
            column metadata, which is inflexible. Flink therefore provides the
            generic Row type to encapsulate an arbitrary set of columns.
         */
        DataStream<Row> ds2 = tableEnvironment.toDataStream(result, Row.class);

        ds2.map(new MapFunction<Row, String>()
        {
            @Override
            public String map(Row value) throws Exception {
                // Access columns by name; the explicit type witness on
                // getFieldAs avoids casts at the call site.
                String id = value.<String>getFieldAs("id");
                Integer vc = value.<Integer>getFieldAs("vc");
                return id + ", " + vc;
            }
        }).print();

        // Propagate failures instead of swallowing them: the previous
        // try/catch with printStackTrace() hid job-submission errors and
        // let the process exit with status 0.
        env.execute();
    }
}
