package com.codejiwei.flink.table;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.types.Row;

import java.util.LinkedList;

/**
 * author: codejiwei
 * date: 2023/8/15
 * desc: Demonstrates appending extra fields to Table rows: convert the Table to a
 * DataStream&lt;Row&gt;, join two constant fields onto each row, declare the new row
 * type via {@code Types.ROW_NAMED}, and convert back to a Table for the sink.
 **/
public class Flink_Table_API_AddFields {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: unbounded datagen table producing (id BIGINT, name STRING, ts TIMESTAMP(3))
        // at one row per second, with a 3-second watermark delay on ts.
        Schema schema = Schema.newBuilder()
                .column("id", DataTypes.BIGINT())
                .column("name", DataTypes.STRING())
                .column("ts", DataTypes.TIMESTAMP(3))
                .watermark("ts", "ts - INTERVAL '3' SECOND")
                .build();
        Table table = tableEnv.from(
                TableDescriptor.forConnector("datagen")
                        .schema(schema)
                        .option("rows-per-second", "1")
                        .option("fields.name.length", "5")
                        .option("fields.id.min", "0")
                        .option("fields.id.max", "10")
                        .build()
        );

        DataStream<Row> rowDataStream = tableEnv.toDataStream(table);

        // Row layout after enrichment: the three source fields plus f1/f2.
        String[] fieldNames = {"id", "name", "ts", "f1", "f2"};
        // TypeInformation stated directly instead of going through the deprecated
        // TypeConversions.fromDataTypeToLegacyInfo(...) bridge:
        // BIGINT -> Types.LONG, TIMESTAMP(3) -> Types.LOCAL_DATE_TIME.
        TypeInformation<?>[] fieldTypes = {
                Types.LONG, Types.STRING, Types.LOCAL_DATE_TIME, Types.STRING, Types.INT
        };

        // Append two constant fields to every row (f1: STRING, f2: INT).
        // returns(...) is required because Row erases its field types at runtime;
        // without it fromDataStream below could not derive a proper schema.
        // (The per-record println of the original was dropped: the print sink
        // already emits every row.)
        SingleOutputStreamOperator<Row> enriched = rowDataStream
                .map((MapFunction<Row, Row>) row -> Row.join(row, Row.of("rule-name", 1)))
                .returns(Types.ROW_NAMED(fieldNames, fieldTypes));

        Table table1 = tableEnv.fromDataStream(enriched);

        table1.printSchema();

        tableEnv.createTemporaryView("tempView", table1);

        // Sink columns mirror the enriched row exactly; f2 is declared INT to match the
        // Java int field (the original declared BIGINT and relied on an implicit cast,
        // and named the columns field1/field2 — INSERT ... SELECT * is positional, so
        // renaming them to f1/f2 is purely cosmetic but keeps source and sink aligned).
        tableEnv.executeSql(
                "create temporary table SinkTable("
                        + "id bigint, name string, ts timestamp(3), f1 string, f2 int"
                        + ") with ('connector' = 'print')");

        // await() blocks main so the streaming job keeps running in the foreground and
        // any job failure is rethrown here instead of being silently dropped.
        tableEnv.executeSql("insert into SinkTable select * from tempView").await();

    }
}
