package com.haozhen.table;

import com.mysql.cj.protocol.ResultsetRowsOwner;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @author haozhen
 * @email haozh@ync1.com
 * @date 2021/2/26  1:12
 */
public class MyTable {

    public static void main(String[] args) throws Exception {
        // Obtain the Flink streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Create the Table environment on top of the streaming environment.
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Unbounded streaming source emitting one ("name", 10) tuple per second.
        DataStreamSource<Tuple2<String, Integer>> name = env.addSource(new SourceFunction<Tuple2<String, Integer>>() {
            // Cooperative cancellation flag: cancel() flips it so run() can exit.
            // Must be volatile — cancel() is invoked from a different thread than run().
            private volatile boolean running = true;

            @Override
            public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception {
                while (running) {
                    ctx.collect(new Tuple2<>("name", 10));
                    Thread.sleep(1000);
                }
            }

            @Override
            public void cancel() {
                // Signal run() to stop. Previously this was a no-op, so the
                // framework could never cancel this source cleanly.
                running = false;
            }
        });

        // Wrap the stream as a Table with columns "name" and "age".
        Table table = tenv.fromDataStream(name, $("name"), $("age"));
        // Query the table: project only the "name" column.
        Table name1 = table.select($("name"));
        // Convert the result back to an append-only DataStream of Rows and print it.
        DataStream<Row> rowDataStream = tenv.toAppendStream(name1, Row.class);

        rowDataStream.print();

        // Submit and run the job.
        env.execute();
    }
}
