package com.codejiwei.flink.sql;

import org.apache.flink.connector.datagen.table.DataGenConnectorOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * author: codejiwei
 * date: 2023/6/8
 * desc: Demonstrates three ways to register a Flink source table (catalog table via
 *       TableDescriptor, temporary table from the same descriptor, and SQL DDL),
 *       then runs a bounded datagen -> print INSERT job.
 **/
public class Flink_Table_SQL_API2 {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(5);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source tables: three equivalent ways to register a datagen source.

        // 1) Catalog table via TableDescriptor (visible in the current catalog).
        TableDescriptor tableDescriptor = TableDescriptor.forConnector("datagen")
                .schema(Schema.newBuilder()
                        .column("f0", DataTypes.STRING())
                        .build())
                .option(DataGenConnectorOptions.ROWS_PER_SECOND, 10L)
                .build();
        tableEnv.createTable("sourceTableA", tableDescriptor);

        // 2) Temporary table from the same descriptor (dropped when the session ends).
        tableEnv.createTemporaryTable("sourceTableB", tableDescriptor);

        // 3) Temporary table via SQL DDL. col2 is a bounded sequence (1001..1005),
        //    so the INSERT job below produces 5 rows and then finishes on its own.
        tableEnv.executeSql("create temporary table sourceTableC(col1 String, col2 bigint, col3 timestamp(3)) " +
                "with ('connector' = 'datagen', " +
                "'rows-per-second' = '10', " +
                "'fields.col1.length' = '5', " +
                "'fields.col2.kind' = 'sequence', " +
                "'fields.col2.start' = '1001', " +
                "'fields.col2.end' = '1005'" +
                ")");

        // Sink table: the print connector writes each row to stdout.
        tableEnv.executeSql("create table sinkTable (sinkCol1 String, sinkCol2 bigint, sinkCol3 timestamp(3)) with ('connector' = 'print')");

        // "INSERT INTO" submits an asynchronous Flink job; await() blocks until it
        // completes so the JVM does not exit before any rows are printed.
        // Note: env.execute() is NOT needed here (and would fail, since no DataStream
        // transformations were defined) — Table API / SQL jobs are driven by executeSql.
        tableEnv.executeSql("insert into sinkTable select col1, col2, col3 from sourceTableC")
                .await();
    }
}
