package com.codejiwei.flink.table;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * author: codejiwei
 * date: 2023/8/7
 * desc: flink table api with schema
 **/
/**
 * Demonstrates converting a DataStream into a Table with an explicit {@link Schema},
 * registering it as a temporary view, and writing it to a print sink via SQL.
 */
public class Flink_Table_API_04 {

    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Read CSV lines of the form "account,channel,timestamp" into a Tuple3 stream.
        // .returns(...) is required because the lambda erases the tuple's generic types.
        SingleOutputStreamOperator<Tuple3<String, String, Long>> sourceDS = env.readTextFile("src/main/resources/data/ReceiptLog.csv")
                .map(line -> {
                    String[] fields = line.split(",");
                    return new Tuple3<String, String, Long>(fields[0], fields[1], Long.parseLong(fields[2]));
                }).returns(Types.TUPLE(Types.STRING, Types.STRING, Types.LONG));

        // Declare the table schema explicitly. For a Tuple type the physical field
        // names are f0/f1/f2, so Schema.column(...) must use those names to bind
        // to the stream's fields; arbitrary names like "col1" would not resolve.
        Schema schema = Schema.newBuilder()
                .column("f0", DataTypes.STRING())
                .column("f1", DataTypes.STRING())
                .column("f2", DataTypes.BIGINT())
                .build();

        // Fix: the schema was previously built but never passed, so fromDataStream
        // fell back to the automatically derived schema.
        Table table = tEnv.fromDataStream(sourceDS, schema);

        tEnv.createTemporaryView("SourceTable", table);

        tEnv.executeSql("create temporary table SinkTable(col1 STRING, col2 STRING, col3 BIGINT) with ('connector' = 'print')");

        // INSERT ... SELECT maps columns by position, so the differing column
        // names (f0/f1/f2 vs col1/col2/col3) are fine as long as types line up.
        // executeSql on an INSERT submits the job; no env.execute() is needed.
        tEnv.executeSql("insert into SinkTable select * from SourceTable");

        System.out.println(table.explain(ExplainDetail.JSON_EXECUTION_PLAN));
    }
}
