package com.atguigu.flinksql.daytest.sql;

import com.atguigu.flinksql.day13.udf.SplitFunction;
import com.atguigu.flinksql.daytest.fun.userUDTF;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * ClassName: Test01_userUDTF
 * Package: com.atguigu.flinksql.daytest
 * Description:
 *        	1.1 Read Kafka records with Flink SQL (json: id, vc, str:string)
 * 	        1.2 Custom UDTF:
 * 	        	str looks like: "hello-world_hello-atguigu"
 * 	        	====>
 * 	        		col a   col b
 * 	        		hello,  world
 * 	        		hello,  atguigu
 * 	        1.3 Write the expanded rows back to Kafka with Flink SQL
 * @Author ChenJun
 * @Create 2023/4/22 8:41
 * @Version 1.0
 */
public class Test01_userUDTF {
    public static void main(String[] args) {

        // 1. Stream execution environment + table environment (single parallelism for the demo).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Source table: read JSON records (id, vc, str) from Kafka.
        //    FIX: removed 'sink.partitioner' = 'fixed' — that is a sink-only option
        //    and was dead configuration on a table that is only read from.
        tableEnv.executeSql(""+
                "CREATE TABLE kafka_source( \n" +
                "  id string,  \n" +
                "  vc Integer ,\n" +
                "  str string\n" +
                "  )\n" +
                "WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'chen' ,\n" +
                "  'scan.startup.mode' = 'group-offsets' ,\n" +
                "  'topic' = 'test1109',\n" +
                "  'format' = 'json'\n" +
                ")");

        // Register the UDTF so it can be referenced from SQL by name.
        tableEnv.createTemporarySystemFunction("my_udtf", userUDTF.class);

        // Apply the UDTF: each source row is cross-joined with the rows the
        // function emits for its 'str' column (LATERAL TABLE = table function join).
        Table table = tableEnv.sqlQuery("" +
                "SELECT \n" +
                "    id,\n" +
                "    vc, \n" +
                "    word1, \n" +
                "    word2\n" +
                "FROM kafka_source, LATERAL TABLE(my_udtf(str))");
        // Register the result as a temporary view so the INSERT below can read it.
        tableEnv.createTemporaryView("t1",table);

        // Sink table: write results to Kafka with upsert semantics.
        //    FIX: the UDTF produces several rows per id, so a primary key of (id)
        //    alone would make later rows overwrite earlier ones in the upsert log
        //    (only one word pair per id would survive). Key on the full output
        //    row instead so every distinct expansion is kept.
        tableEnv.executeSql("CREATE TABLE kafka_sink (\n" +
                "  `id` STRING,\n" +
                "  `vc` Integer,\n" +
                "  `word1` STRING,\n" +
                "  `word2` STRING,\n" +
                "   PRIMARY KEY (id, word1, word2) NOT ENFORCED"+
                ") WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = 'test',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json'\n" +
                ")");

        // Submit the streaming insert job (executeSql on INSERT triggers execution;
        // no separate env.execute() is needed for Table API pipelines).
        tableEnv.executeSql("insert into kafka_sink select * from t1");


    }
}
