package com.atguigu.flink.chapter10.function;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

import java.util.Locale;

/*
标量函数：
    一进（一行）一出
 */
public class ScalarFunctionDemo {
    public static void main(String[] args) {
        // Pin the Flink web UI to a fixed port for local debugging.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Source table backed by Kafka topic 's1'; rows are CSV records (id, ts, vc).
        tEnv.executeSql("create table sensor(" +
                "id string," +
                "ts bigint," +
                "vc int" +
                ") with(" +
                "  'connector' = 'kafka', " +
                "  'topic' = 's1', " +
                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
                "  'properties.group.id' = 'atguigu', " +
                "  'scan.startup.mode' = 'latest-offset', " +
                "  'format' = 'csv' " +
                ")");

        // 1. Register the custom scalar function under the SQL name 'my_to_upper'.
        tEnv.createTemporaryFunction("my_to_upper", MyToUpper.class);

        // 2. Use the registered function in SQL and print the streaming result.
        tEnv.sqlQuery("select id,my_to_upper(id) from sensor").execute().print();
    }

    /**
     * Scalar UDF (one row in, one value out) that upper-cases a string.
     *
     * <p>By convention the evaluation method must be public and named
     * {@code eval}; its parameter and return types define the SQL signature.
     */
    public static class MyToUpper extends ScalarFunction {
        /**
         * Upper-cases the input using {@link Locale#ROOT} so results are
         * locale-independent (avoids e.g. the Turkish dotted/dotless-i issue).
         *
         * @param s input string, may be null
         * @return the upper-cased string, or null when the input is SQL NULL
         */
        public String eval(String s) {
            // Propagate SQL NULL unchanged.
            return s == null ? null : s.toUpperCase(Locale.ROOT);
        }
    }
}




//public class ScalarFunctionDemo {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//
//        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
//        tEnv.executeSql("create table sensor(" +
//                "id string," +
//                "ts bigint," +
//                "vc int" +
//                ") with(" +
//                "  'connector' = 'kafka', " +
//                "  'topic' = 's1', " +
//                "  'properties.bootstrap.servers' = 'hadoop162:9092', " +
//                "  'properties.group.id' = 'atguigu', " +
//                "  'scan.startup.mode' = 'latest-offset', " +
//                "  'format' = 'csv' " +
//                ")");
//
//        Table table = tEnv.from("sensor");
//
//
//        //1、注册一个自定义函数
//        tEnv.createTemporaryFunction("my_to_upper",My_To_Upper.class);
//
//        //2、在Sql中使用
//        tEnv.sqlQuery("select id,my_to_upper(vc) from sensor").execute().print();
//    }
//
//    private static class My_To_Upper extends ScalarFunction{
//        //约定方法
//        //方法名必须是：eval
//        //参数和返回值根据实际需求来实现
//        public String eval(String s){
//            if (s != null){
//                return s.toUpperCase();
//            }
//            return null;
//        }
//    }
//}
