package com.bleeth.flow.step.field;

import cn.hutool.core.convert.Convert;
import cn.hutool.core.map.MapUtil;
import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.bleeth.flow.core.util.AviatorUtil;
import com.bleeth.flow.core.util.SparkUtil;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;

import java.io.Serializable;
import java.util.List;
import java.util.Map;

/**
 * Custom-function plugin: for each configured {@link FunctionParameter}, evaluates an
 * Aviator expression against every row and adds/updates a string column with the result.
 *
 * @author ：Bleeth
 * @date ：2021-08-05
 */
@Data
@PluginAnnotation(name = "自定义函数",
        type = PluginTypeEnum.FIELD,
        description = "",
        id = "FunctionPlugin")
public class FunctionPlugin extends APlugin implements Serializable {

    private static final long serialVersionUID = -6765945803641420022L;

    // List of function definitions to apply, in order.
    private List<FunctionParameter> funcList;

    // Expression body of the function currently being applied.
    // Kept (and still assigned below) only for backward compatibility with the
    // @Data accessors and the legacy no-arg FunctionUDF constructor; the UDFs
    // registered by action() no longer read it at execution time.
    private String funcContent;

    // Parameter bindings of the function currently being applied.
    // Same backward-compatibility note as funcContent.
    private List<Map<String, Object>> paramList;

    /**
     * Pulls the upstream dataset from the first plugin in {@code fromList} and applies
     * every configured function as a new/updated column via a Spark UDF.
     *
     * <p>BUG FIX: the previous implementation registered a single UDF whose call()
     * read the mutable fields {@code funcContent}/{@code paramList}. Since Spark
     * evaluates {@code withColumn} lazily, every column ended up computed with the
     * LAST function's content. Each function now gets its own UDF instance that
     * captures its content/params immutably, registered under a unique name.
     *
     * @param input       upstream dataset (unused directly; the real input is fetched
     *                    from {@code allWrappers}, matching the framework convention)
     * @param allWrappers all worker wrappers keyed by plugin name
     * @return dataset with one column added/updated per configured function
     */
    @Override
    public Dataset<Row> action(Dataset<Row> input, Map<String, WorkerWrapper> allWrappers) {
        super.action(input, allWrappers);

        String fromPluginName = fromList.get(0);
        WorkerWrapper fromWrapper = allWrappers.get(fromPluginName);
        Dataset<Row> ds = (Dataset<Row>) fromWrapper.getWorkResult().getResult();

        // Snapshot the upstream columns once; every UDF receives them as one struct.
        // NOTE(review): columns added by earlier functions in this loop are NOT part
        // of the struct, so a later function cannot reference an earlier result —
        // this preserves the original behavior.
        StructType schema = ds.schema();
        String[] fieldNames = schema.fieldNames();
        Column[] columns = new Column[fieldNames.length];
        for (int i = 0; i < fieldNames.length; i++) {
            columns[i] = functions.col(fieldNames[i]);
        }

        for (int i = 0; i < funcList.size(); i++) {
            FunctionParameter functionParameter = funcList.get(i);
            // Keep the @Data fields in sync for backward compatibility.
            funcContent = functionParameter.getFuncContent();
            paramList = functionParameter.getParamList();

            // Unique per-instance, per-function name so registrations never clobber
            // each other (the old shared "function_udf" name collided across loop
            // iterations and across concurrently running plugin instances).
            String udfName = "function_udf_" + System.identityHashCode(this) + "_" + i;
            UDF1<Row, String> functionUDF = new FunctionUDF(
                    functionParameter.getFuncContent(),
                    functionParameter.getParamList());
            // NOTE(review): resultType on FunctionParameter is never consulted; the
            // result is always registered as StringType — confirm whether that field
            // is intended to drive the return DataType here.
            SparkUtil.getSparkInstance().udf().register(udfName, functionUDF, DataTypes.StringType);

            ds = ds.withColumn(
                    functionParameter.getFieldName(),
                    functions.callUDF(udfName, functions.struct(columns))
            );
        }
        return ds;
    }

    /**
     * Configuration of a single custom function.
     */
    @Data
    public static class FunctionParameter implements Serializable {

        private static final long serialVersionUID = -15211460127821801L;

        // Function name (display/identification).
        private String funcName;

        private String funcUid;

        // Aviator expression body to evaluate.
        private String funcContent;

        // Function parameters: "param" is the variable name inside the expression,
        // "value" is the substitution value, "type"=1 means the value is the name of
        // a column to read from the row, 0 means a literal value. Example:
        //{
        //  "param":"_date_",
        //  "value":"时间",
        //  "type":1
        //}
        private List<Map<String, Object>> paramList;

        // Return type; same type names as Spark.
        // NOTE(review): currently unused — action() always registers StringType.
        private String resultType;

        // "UPDATE" or "ADD".
        private String funcType;

        // For ADD: name of the new column; for UPDATE: name of the column to replace.
        private String fieldName;
    }

    /**
     * UDF evaluating one Aviator expression against a row packed into a struct.
     *
     * <p>Deliberately kept as a non-static inner class: the legacy no-arg constructor
     * falls back to the enclosing plugin's mutable {@code funcContent}/{@code paramList}
     * fields so existing callers keep the old behavior. New code should use the
     * capturing constructor, which is immune to later field mutation.
     */
    public class FunctionUDF implements UDF1<Row, String> {

        // Captured expression/params; null means "fall back to the outer fields".
        private final String content;
        private final List<Map<String, Object>> params;

        /** Legacy constructor: reads the enclosing plugin's mutable fields at call time. */
        public FunctionUDF() {
            this(null, null);
        }

        /** Preferred constructor: captures the function's content/params immutably. */
        public FunctionUDF(String content, List<Map<String, Object>> params) {
            this.content = content;
            this.params = params;
        }

        @Override
        public String call(Row row) {
            String effectiveContent = content != null ? content : funcContent;
            List<Map<String, Object>> effectiveParams = params != null ? params : paramList;

            // Build the variable environment for the expression.
            Map<String, Object> rowMap = MapUtil.newHashMap();
            for (Map<String, Object> paraMap : effectiveParams) {
                Integer type = Convert.toInt(paraMap.get("type"));
                String param = Convert.toStr(paraMap.get("param"));
                Object value = paraMap.get("value");
                // Guard against null "type" (the old unboxing comparison could NPE);
                // treat explicit 0 as a literal, anything else as a column reference.
                if (type != null && type == 0) {
                    rowMap.put(param, value);
                } else {
                    Object rowValue = row.getAs(Convert.toStr(value));
                    rowMap.put(param, rowValue);
                }
            }
            Object value = AviatorUtil.exec(effectiveContent, rowMap);
            return value == null ? "" : value.toString();
        }
    }


}
