package com.bleeth.flow.step.agg;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.convert.Convert;
import cn.hutool.core.map.MapUtil;
import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.bleeth.flow.core.util.AviatorUtil;
import com.bleeth.flow.core.util.SparkUtil;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.types.DataTypes;

import java.io.Serializable;
import java.util.List;
import java.util.Map;

/**
 * @author ：Bleeth
 * @date ：2021-08-05
 * @description：Aggregation component — applies a user-defined Aviator expression
 * as a (planned) aggregate over grouped rows.
 */
@Data
@PluginAnnotation(name = "自定义聚合",
        type = PluginTypeEnum.AGG,
        description = "",
        id = "AggPlugin")
public class AggPlugin extends APlugin implements Serializable {

    private static final long serialVersionUID = 2263548693030068075L;

    // Column name used in the groupBy clause.
    private String groupByKey;

    // Column name the aggregate expression operates on.
    private String aggKey;

    // Function definition (name, body, parameter bindings) supplied by the flow config.
    private AggParameter funcParam;


    // Intermediate value, copied from funcParam so the UDF can reach it at execution time.
    private String funcContent;

    // Intermediate value, copied from funcParam so the UDF can reach it at execution time.
    private List<Map<String, Object>> paramMapList;


    /**
     * Pulls the upstream dataset from the predecessor wrapper, registers the
     * Aviator-backed UDF, and (once the TODO below is completed) performs the
     * configured group-by aggregation.
     *
     * @param input       unused here; data is taken from the upstream wrapper result
     * @param allWrappers all workflow step wrappers, keyed by plugin name
     * @return the dataset (currently passed through unchanged — see TODO)
     */
    @Override
    public Dataset<Row> action(Dataset<Row> input, Map<String, WorkerWrapper> allWrappers) {
        String fromPluginName = fromList.get(0);
        WorkerWrapper fromWrapper = allWrappers.get(fromPluginName);
        Dataset<Row> ds = (Dataset<Row>) fromWrapper.getWorkResult().getResult();

        UDF1<Row, Integer> functionUDAF = new FunctionUDAF();
        // BUGFIX: the UDF's Java return type is Integer, so the registered Spark
        // return type must be IntegerType — registering it as StringType would
        // fail at runtime once the UDF is actually invoked.
        SparkUtil.getSparkInstance().udf().register("function_udaf", functionUDAF, DataTypes.IntegerType);

        funcContent = funcParam.getFuncContent();
        String fieldName = funcParam.getFieldName();
        paramMapList = funcParam.getParamMapList();

        // `expr` and `fieldName` feed the pending aggregation below.
        Column expr = functions.expr(aggKey);
        // TODO(review): aggregation is not wired up yet — the dataset is currently
        // returned unchanged. Intended implementation kept for reference:
       /* ds = ds.groupBy(groupByKey).agg(functions.aggregate(expr,
                functions.lit(0),
                (col1, col2) -> {
                    col1.name(aggKey + "_buffer");
                    col2.name(aggKey + "_input");
                    return functions.callUDF("function_udaf", functions.struct(col1, col2)).name(fieldName);
                }
        ));*/
        return ds;
    }


    /**
     * Configuration payload describing the user-defined aggregate function.
     */
    @Data
    public static class AggParameter implements Serializable {

        private static final long serialVersionUID = -15211460127821801L;

        // Function name.
        private String funcName;

        // Function body (an Aviator expression).
        private String funcContent;

        // Function parameters: "param" is the variable name inside the UDF,
        // "value" is what the variable is bound to, and "type"=1 means the value
        // is read from the row (column name), 0 means it is a literal. Example:
        //{
        //  "param":"_date_",
        //  "value":"时间",
        //  "type":1
        //}
        private List<Map<String, Object>> paramMapList;

        // Return type, using Spark data-type names.
        private String resultType;

        // Either UPDATE or ADD.
        private String funcType;

        // ADD means a new field is added; UPDATE means the fieldName column is overwritten.
        private String fieldName;
    }


    /**
     * UDF that evaluates the configured Aviator expression against one row.
     * NOTE(review): despite the name this is a plain UDF1, not a Spark UDAF.
     * Non-static inner class on purpose: it needs the enclosing plugin's
     * funcContent/paramMapList, and AggPlugin itself is Serializable.
     */
    public class FunctionUDAF implements UDF1<Row, Integer> {

        @Override
        public Integer call(Row row) {
            // Build the Aviator variable environment from ALL configured
            // parameters (previously only the first entry of paramMapList
            // was honored, silently dropping the rest).
            Map<String, Object> rowMap = MapUtil.newHashMap();
            for (Map<String, Object> paramMap : paramMapList) {
                Integer type = Convert.toInt(paramMap.get("type"));
                String param = Convert.toStr(paramMap.get("param"));
                Object value = paramMap.get("value");
                // type==0 -> literal value; otherwise treat "value" as a column name.
                // Null-guard avoids an NPE from unboxing a missing "type" entry.
                if (type != null && type == 0) {
                    rowMap.put(param, value);
                } else {
                    Object rowValue = row.getAs(Convert.toStr(value));
                    rowMap.put(param, rowValue);
                }
            }

            return AviatorUtil.execInt(funcContent, rowMap);
        }
    }




}
