package com.apex.flink.transform.functions;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.table.types.inference.TypeInference;
import org.apache.flink.types.Row;

import java.util.Optional;

/**
 * A table function (UDTF) that parses a comma-separated string of the form
 * {@code "first,second,third"} and emits one {@link Row} with three fields:
 * (STRING, BIGINT, INT).
 */
public class ParseUdtfRow extends TableFunction<Row> {

    /**
     * Splits the input on commas and collects a single three-field row.
     *
     * @param str input of the form {@code "first,second,third"}, where the second
     *            part must parse as a {@code long} and the third as an {@code int};
     *            a {@code null} input emits nothing
     * @throws IllegalArgumentException if the input has fewer than three parts
     * @throws NumberFormatException    if the numeric parts cannot be parsed
     */
    public void eval(String str) {
        if (str == null) {
            // NULL input produces no rows rather than an NPE at runtime
            return;
        }
        String[] parts = str.split(",");
        if (parts.length < 3) {
            throw new IllegalArgumentException(
                    "Expected at least 3 comma-separated fields but got: \"" + str + "\"");
        }
        collect(Row.of(parts[0], Long.parseLong(parts[1]), Integer.parseInt(parts[2])));
    }

    /**
     * Declares the function's type inference explicitly. Because the result type is
     * {@link Row}, Flink cannot derive the produced field types by reflection, so the
     * output type must be spelled out here as {@code ROW<STRING, BIGINT, INT>} —
     * matching exactly what {@link #eval(String)} collects.
     */
    @Override
    public TypeInference getTypeInference(DataTypeFactory typeFactory) {
        return TypeInference.newBuilder()
                // eval(String) takes exactly one STRING argument; call arguments are
                // implicitly cast to this type when necessary
                .typedArguments(DataTypes.STRING())
                // the declared row type must mirror eval(): (String, long, int)
                .outputTypeStrategy(callContext -> Optional.of(
                        DataTypes.ROW(
                                DataTypes.FIELD("f0", DataTypes.STRING()),
                                DataTypes.FIELD("f1", DataTypes.BIGINT()),
                                DataTypes.FIELD("f2", DataTypes.INT()))))
                .build();
    }
}
