package com.codejiwei.flink.table;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * author: codejiwei
 * date: 2023/8/8
 * desc: DataStream &lt;-&gt; Table API round-trip demo: reads a CSV receipt log,
 *       converts the typed DataStream into a Table, registers it as the
 *       temporary view "ruleTable", converts back to a DataStream&lt;Row&gt;,
 *       and prints it. (Previous javadoc said "broadcast test", which did
 *       not match the code.)
 **/
public class Flink_Table_API_05 {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Parse each non-blank CSV line into (id, txId, timestamp). The blank-line
        // filter guards against ArrayIndexOutOfBounds/NumberFormatException on
        // trailing empty lines. returns(...) is required because lambda type
        // erasure hides the Tuple3 component types from Flink's type extractor.
        SingleOutputStreamOperator<Tuple3<String, String, Long>> sourceDS = env.readTextFile("src/main/resources/data/ReceiptLog.csv")
                .filter(line -> !line.trim().isEmpty())
                .map(line -> {
                    String[] fields = line.split(",");
                    return new Tuple3<>(fields[0], fields[1], Long.parseLong(fields[2]));
                }).returns(Types.TUPLE(Types.STRING, Types.STRING, Types.LONG));

        // NOTE(review): this schema is built but never applied, so the table
        // below still exposes the default tuple field names f0/f1/f2. To use
        // col1/col2/col3, the columns must be mapped from the physical fields
        // (e.g. Schema.newBuilder().columnByExpression("col1", "f0")...) and the
        // schema passed as tEnv.fromDataStream(sourceDS, schema) — TODO confirm
        // intent before wiring it in, as it changes the view's column names.
        Schema schema = Schema.newBuilder()
                .column("col1", DataTypes.STRING())
                .column("col2", DataTypes.STRING())
                .column("col3", DataTypes.BIGINT())
                .build();

        // DataStream -> Table (implicit schema derived from Tuple3: f0, f1, f2).
        Table table = tEnv.fromDataStream(sourceDS);

        // Register the table so it is queryable via SQL as "ruleTable".
        tEnv.createTemporaryView("ruleTable", table);

        // Table -> DataStream<Row> round trip; print rows to stdout.
        DataStream<Row> stream = tEnv.toDataStream(table);
        stream.print();

        env.execute();
    }
}
