package com.atguigu.flink.chapter10.function;


import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.TableFunction;

/*
Table function (UDTF): one input row fans out to multiple output rows.

    "hello word"
    "hello word"        hello   5
    "hello word"        word    4
"hello atguigu world"
    hello atguigu world        hello    5
    hello atguigu world        atguigu  7
    hello atguigu world        world    5
 */
/**
 * Demonstrates a user-defined table function (UDTF) in Flink SQL.
 *
 * <p>Reads CSV records {@code id,ts,vc} from Kafka topic {@code s1}, then
 * splits each {@code id} string into words via {@link MySplit} and joins the
 * exploded rows back to the source row with {@code LATERAL TABLE}.
 */
public class TableFunctionDemo {

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Pin the local web UI port so reruns are reachable at the same address.
        conf.setInteger("rest.port",2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Source table backed by Kafka; schema matches the CSV payload.
        tEnv.executeSql("create table sensor(" +
                "id string," +
                "ts bigint," +
                "vc int" +
                ") with(" +
                "  'connector' = 'kafka', " +
                "  'topic' = 's1', " +
                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
                "  'properties.group.id' = 'atguigu', " +
                "  'scan.startup.mode' = 'latest-offset', " +
                "  'format' = 'csv' " +
                ")");

        // 1. Register the UDTF under the name referenced in the SQL below.
        tEnv.createTemporaryFunction("my_split",MySplit.class);

        // 2. Use in SQL: ", LATERAL TABLE(f(x))" cross-joins each source row
        //    with every row the table function emits for it. Rows for which
        //    the function emits nothing are dropped; use
        //    "LEFT JOIN LATERAL TABLE(f(x)) ON TRUE" to keep them instead.
        tEnv.sqlQuery("select " +
                "id,word,len " +
                "from sensor " +
                ",lateral table(my_split(id))")
                .execute()
                .print();
    }

    /**
     * Splits the input string on single spaces and emits one row per word,
     * each carrying the word and its length.
     *
     * <p>The {@code @FunctionHint} declares the produced row type explicitly
     * (column names {@code word}, {@code len}) instead of relying on
     * reflective extraction from {@link WordLen}.
     */
    @FunctionHint(output = @DataTypeHint("row<word string,len int>"))
    public static class MySplit extends TableFunction<WordLen> {
        public void eval(String s){
            if (s != null){
                // Demo filter: ignore inputs shorter than 10 characters
                // (emitting nothing means the source row produces no output).
                if (s.length() < 10){
                    return;
                }

                String[] words = s.split(" ");

                for (String word : words) {
                    // Each collect() call emits one output row.
                    collect(new WordLen(word,word.length()));
                }
            }
        }
    }

    /** Output record of {@link MySplit}: a word and its character length. */
    public static class WordLen {
        public String word;
        public Integer len;

        public WordLen(String word, Integer len) {
            this.word = word;
            this.len = len;
        }
    }
}


//public class TableFunctionDemo {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
//        tEnv.executeSql("create table sensor(" +
//                "id string," +
//                "ts bigint," +
//                "vc int" +
//                ") with(" +
//                "  'connector' = 'kafka', " +
//                "  'topic' = 's1', " +
//                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
//                "  'properties.group.id' = 'atguigu', " +
//                "  'scan.startup.mode' = 'latest-offset', " +
//                "  'format' = 'csv' " +
//                ")");
//
//
//        Table table = tEnv.from("sensor");
//
//        //1、注册一个自定义函数
//        tEnv.createTemporaryFunction("my_split",MySplit.class);
//
//        //2、在Table api中使用
//
//        //3、在sql中使用
//
//        /*tEnv.sqlQuery("select " +
//                "id,word,len " +
//                "from sensor " +
//                "left join lateral table(my_split(id)) on true");
//         */
//
//        tEnv.sqlQuery("select " +
//                "id,word,len " +
//                "from sensor " +
//                ",lateral table(my_split(id))");
//
//    }
//
//
////    public static class MySplit extends TableFunction<WordLen>{
////        public void eval(String s){
////            if (s != null){
////                if (s.length() < 0){
////                    return;
////                }
////
////                String[] words = s.split(" ");
////
////                for (String word : words) {
////                    //这个方法调用一次，就输出一行
////                    collect(new WordLen(word, words.length));
////                }
////            }
////        }
////    }
//
//    @FunctionHint(output = @DataTypeHint("row<id,word,len>"))
//    public static class MySplit extends TableFunction<WordLen>{
//        public void eval(String s){
//            if (s != null){
//                if (s.length() < 0){
//                    return;
//                }
//
//                String[] words = s.split(" ");
//
//                for (String word : words) {
//                    //这个方法调用一次，就输出一行
//                    collect(new WordLen(word, words.length));
//                }
//            }
//        }
//    }
//
//    public static class WordLen {
//        public String word;
//        public Integer len;
//
//        public WordLen(String word, Integer len) {
//            this.word = word;
//            this.len = len;
//        }
//    }
//}



//public class TableFunctionDemo {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//
//        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
//
//        tEnv.executeSql("create table sensor(" +
//                "id string," +
//                "ts bigint," +
//                "vc int" +
//                ") with(" +
//                "  'connector' = 'kafka', " +
//                "  'topic' = 's1', " +
//                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
//                "  'properties.group.id' = 'atguigu', " +
//                "  'scan.startup.mode' = 'latest-offset', " +
//                "  'format' = 'csv' " +
//                ")");
//
//
//        Table table = tEnv.from("sensor");
//
//        //1、注册一个自定义函数
//        tEnv.createTemporaryFunction("my_split",MySplit.class);
//
//        //2、在Table api中使用
//
//        //3、在sql中使用
//
//        /*tEnv.sqlQuery("select " +
//                "id,word,len " +
//                "from sensor " +
//                "left join lateral table(my_split(id)) on true");
//         */
//
//        tEnv.sqlQuery("select " +
//                "id,word,len " +
//                "from sensor " +
//                ",lateral table(my_split(id))");
//
//
//    }
//
////    public static class MySplit extends TableFunction<WordLen>{
////        public void eval(String s){
////            if (s != null){
////                if (s.length() < 0){
////                    return;
////                }
////
////                String[] words = s.split(" ");
////
////                for (String word : words) {
////                    //调用一次这个方法，相当于一行
////                    collect(new WordLen(word, words.length));
////                }
////            }
////        }
////    }
//
//    @FunctionHint(output = @DataTypeHint("row<id,word,len>"))
//    public static class MySplit extends TableFunction<WordLen>{
//        public void eval(String s){
//            if (s != null){
//                if (s.length() < 0){
//                    return;
//                }
//
//                String[] words = s.split(" ");
//
//                for (String word : words) {
//                    //调用一次这个方法，相当于一行
//                    collect(new WordLen(word, words.length));
//                }
//            }
//        }
//    }
//
//    public static class WordLen {
//        public String word;
//        public Integer len;
//
//        public WordLen(String word, Integer len) {
//            this.word = word;
//            this.len = len;
//        }
//    }
//}









































































