package edu.nepu.flink.api.sql;

import edu.nepu.flink.api.bean.WaterSensor;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.InputGroup;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

/**
 * @Date 2024/3/5 21:45
 * @Created by chenshuaijun
 */
public class SelfDefineScalarFunction {

    /**
     * Demo of a user-defined scalar function ("one row in, one value out"):
     * registers a function that returns the JVM hashCode of whatever
     * argument it receives, then calls it from a SQL query.
     */
    public static void main(String[] args) {

        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setParallelism(1);

        // Bounded in-memory sample of water-sensor readings.
        DataStreamSource<WaterSensor> sensorStream = environment.fromElements(
                new WaterSensor("s1", 1L, 1),
                new WaterSensor("s1", 2L, 2),
                new WaterSensor("s2", 2L, 2),
                new WaterSensor("s3", 3L, 3),
                new WaterSensor("s3", 4L, 4)
        );

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(environment);

        // NOTE: this registers a *temporary system* function — it is
        // session-scoped and would shadow a built-in of the same name.
        tEnv.createTemporarySystemFunction("myHash", HashFunction.class);

        Table sensors = tEnv.fromDataStream(sensorStream);
        tEnv.createTemporaryView("sensors", sensors);

        // Invoke the UDF from SQL; execute().print() submits the job and
        // streams the result to stdout.
        tEnv.sqlQuery("select id,myHash(id) as hash_id from sensors").execute().print();
    }

    /**
     * Scalar UDF returning the hashCode of its argument.
     * {@code InputGroup.ANY} lets the planner pass a value of any data type.
     */
    public static class HashFunction extends ScalarFunction {
        public int eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object value) {
            return value.hashCode();
        }
    }
}
