package sql;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.sql.Timestamp;
import java.util.UUID;
import java.util.concurrent.ExecutionException;

/**
 * Demonstrates Flink SQL type-cast semantics: CAST(STRING AS INT) from a Kafka
 * JSON source to a print sink.
 */
public class D15_typeTest {

  /**
   * Submits a streaming SQL job that reads {@code aa STRING} from a Kafka topic and
   * writes {@code (aa, CAST(aa AS INT))} to a print sink, to observe Flink's
   * STRING-to-INT cast behavior (see the sample output comment below).
   *
   * @param args unused
   * @throws InterruptedException if the thread is interrupted while awaiting the job
   */
  public static void main(String[] args) throws InterruptedException {

    Configuration flinkConf = new Configuration();
    // Pin the REST port so the local web UI is reachable at a known address.
    flinkConf.setString("rest.port", "9091");
    // Fixed job id keeps the job identity stable across runs.
    flinkConf.setString("$internal.pipeline.job-id", "c0e67372c9136321a83c49257cf79999");
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(flinkConf);
    // Single parallelism so printed rows arrive in a deterministic order.
    env.setParallelism(1);
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

    // Registers the UDF in the catalog. NOTE(review): it is not referenced by the
    // query below; kept to preserve the original catalog side effect.
    String useUdf = "CREATE FUNCTION split AS 'functions.Split'";

    // Kafka JSON source with a single STRING column. Random group id ensures a
    // fresh consumer group on every run; 'latest-offset' skips historical data.
    String genSql = "CREATE TABLE ods_tb ( " +
        " aa STRING" +
        ") WITH ( " +
        "  'connector' = 'kafka'," +
        "  'topic' = 'test'," +
        "  'properties.bootstrap.servers' = 'kafka:9092'," +
        "  'properties.group.id' = '" + UUID.randomUUID().toString() + "'," +
        "  'scan.startup.mode' = 'latest-offset'," +
        "  'format' = 'json'" +
        ")";

    // Print sink: writes each row to stdout.
    String sinkPrint = "CREATE TABLE print (" +
        "    aa STRING, " +
        "    bb INTEGER " +
        ") WITH (" +
        "     'connector' = 'print'" +
        ")";

    // The query under test: the raw string alongside its INT cast.
    String sql = "INSERT INTO print " +
        " SELECT " +
        " aa," +
        " CAST(aa as int) as bb" +
        " FROM ods_tb";

    tableEnv.executeSql(useUdf);
    tableEnv.executeSql(genSql);
    tableEnv.executeSql(sinkPrint);

    // Print the submission time before blocking on the (unbounded) job.
    System.out.println(new Timestamp(System.currentTimeMillis()));

    /*
     * Observed output (STRING -> INT cast truncates toward zero; non-numeric or
     * null input yields null):
     *  +I[0, 0]
     *  +I[1, 1]
     *  +I[1.0, 1]
     *  +I[-0.001, 0]
     *  +I[3.1415, 3]
     *  +I[3.999999, 3]
     *  +I[null, null]
     */

    // executeSql submits the INSERT asynchronously; without await() the result is
    // fire-and-forget: job failures are silently dropped and the client JVM may
    // exit before any rows are printed. Block here so errors surface.
    try {
      tableEnv.executeSql(sql).await();
    } catch (ExecutionException e) {
      // Preserve the root cause instead of swallowing it.
      throw new RuntimeException("Streaming insert job failed", e);
    }
  }
}
