package com.bleeth.flow.step.field;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.map.MapUtil;
import cn.hutool.core.util.StrUtil;
import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.bleeth.flow.core.util.SparkUtil;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.types.DataTypes;

import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Dictionary-mapping plugin: for each configured field, translates its values
 * through a "key=value" dictionary file, either appending the translated value
 * as a new column (type ADD) or overwriting the source column in place
 * (type UPDATE). Unmatched keys map to null.
 *
 * @author ：Bleeth
 * @date ：2021-08-05
 */
@Data
@PluginAnnotation(name = "自定义聚合",
        type = PluginTypeEnum.AGG,
        description = "",
        id = "DictPlugin")
public class DictPlugin extends APlugin implements Serializable {

    private static final long serialVersionUID = -4392400113251268917L;

    /** Per-field dictionary mapping configuration, applied in order. */
    private List<DictParamBean> paramList;

    // NOTE(review): never read or written inside this class — candidate for removal.
    private List<Dataset<Row>> datasetList = new ArrayList<>();

    // Most recently loaded dictionary; retained because @Data exposes public
    // accessors for it. The UDFs no longer read this field (see MapUDF).
    private Map<String, String> dictMap;

    /**
     * Applies every configured dictionary mapping to the upstream dataset.
     *
     * @param param       input dataset passed through the flow framework
     * @param allWrappers all upstream workers, keyed by plugin name; the first
     *                    entry of {@code fromList} supplies the dataset to transform
     * @return the dataset with all ADD/UPDATE dictionary columns applied
     */
    @Override
    public Dataset<Row> action(Dataset<Row> param, Map<String, WorkerWrapper> allWrappers) {
        super.action(param, allWrappers);

        String fromPluginName = fromList.get(0);
        WorkerWrapper fromWrapper = allWrappers.get(fromPluginName);
        Dataset<Row> ds = (Dataset<Row>) fromWrapper.getWorkResult().getResult();

        for (int i = 0; i < paramList.size(); i++) {
            DictParamBean dictParamBean = paramList.get(i);
            String fieldName = dictParamBean.getFieldName();
            String type = dictParamBean.getType();
            String dictFieldName = dictParamBean.getDictFieldName();
            // ADD with no explicit target column defaults to "<fieldName>_dict",
            // as promised by the DictParamBean documentation.
            if (StrUtil.isEmpty(dictFieldName)) {
                dictFieldName = fieldName + "_dict";
            }

            // Load this param's dictionary. Default (and currently only) source
            // is a file; sourceType is configured but not yet dispatched on.
            dictMap = loadDict(dictParamBean.getDictName());

            // Register one UDF per param, each owning a snapshot of its dictionary.
            // A single shared UDF reading the mutable dictMap field is wrong under
            // Spark's lazy evaluation: every column would resolve against whatever
            // dictionary was loaded LAST. The name is also made unique per plugin
            // instance so concurrent DictPlugin instances in the same Spark session
            // cannot clobber each other's registration.
            String udfName = "map_udf_" + Integer.toHexString(System.identityHashCode(this)) + "_" + i;
            SparkUtil.getSparkInstance().udf().register(udfName, new MapUDF(dictMap), DataTypes.StringType);

            if (StrUtil.equalsIgnoreCase(type, "ADD")) {
                // Append the translated value as a new column.
                ds = ds.withColumn(dictFieldName, functions.callUDF(udfName, ds.col(fieldName)));
            } else if (StrUtil.equalsIgnoreCase(type, "UPDATE")) {
                // Replace the source column with the translated value.
                ds = ds.withColumn(fieldName, functions.callUDF(udfName, ds.col(fieldName)));
            }
        }
        return ds;
    }

    /**
     * Loads a dictionary from a UTF-8 file of "key=value" lines.
     * Blank lines and lines without '=' are skipped. Only the FIRST '=' splits
     * key from value, so values may themselves contain '=' (the previous
     * StrUtil.split-based parsing silently truncated such values).
     *
     * @param dictName path of the dictionary file
     * @return mutable map of key to value
     */
    private static Map<String, String> loadDict(String dictName) {
        Map<String, String> dict = MapUtil.newHashMap();
        List<String> dictLineList = FileUtil.readUtf8Lines(new File(dictName));
        for (String line : dictLineList) {
            if (StrUtil.isEmpty(line)) {
                continue;
            }
            int sep = line.indexOf('=');
            if (sep < 0) {
                continue;
            }
            dict.put(line.substring(0, sep), line.substring(sep + 1));
        }
        return dict;
    }


    /** Configuration for one field's dictionary mapping. */
    @Data
    public static class DictParamBean implements Serializable {
        private static final long serialVersionUID = -4123033677248661858L;

        // Source column whose values are looked up in the dictionary.
        private String fieldName;

        // "ADD" appends a new column; "UPDATE" overwrites fieldName in place.
        private String type;

        // Optional target column for ADD; defaults to "<fieldName>_dict" when
        // blank. Ignored by UPDATE, which always writes back to fieldName.
        private String dictFieldName;

        // Dictionary source type — declared but not yet dispatched on; file
        // loading is the only implemented source.
        private String sourceType;

        // Dictionary name; interpreted as a file path for the file source.
        private String dictName;
    }


    /**
     * Serializable UDF mapping a key to its dictionary value (null when absent).
     * Declared static with its own dictionary copy so that (a) shipping the UDF
     * to executors does not serialize the whole enclosing plugin, and (b) each
     * registered UDF keeps the dictionary it was created with instead of all of
     * them sharing the plugin's mutable dictMap field.
     */
    public static class MapUDF implements UDF1<String, String> {
        private static final long serialVersionUID = -3175162002464574460L;

        /** Defensive copy of the dictionary taken at construction time. */
        private final Map<String, String> dict;

        public MapUDF(Map<String, String> source) {
            this.dict = MapUtil.newHashMap();
            this.dict.putAll(source);
        }

        @Override
        public String call(String key) {
            return dict.get(key);
        }
    }


}
