package com.sui.bigdata.sml.web.util;

import com.feidee.fd.sml.algorithm.forecast.*;
import com.feidee.fdhadoop.hdfs.HdfsUtils;
import com.sui.bigdata.sml.web.dto.ModelDTO;
import com.sui.bigdata.sml.web.exception.InvalidHdfsPathException;
import com.sui.bigdata.sml.web.exception.InvalidModelTypeException;
import com.sui.bigdata.sml.web.exception.LoadModelException;
import com.sui.bigdata.sml.web.repository.model.FieldInfo;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import scala.collection.JavaConverters;
import scala.collection.Map$;

import java.util.*;

import static com.sui.bigdata.sml.web.util.ModelType.*;

/**
 * Verifies that a model can actually be loaded (and warmed up) before it is accepted.
 *
 * @author songhaicheng
 * @date 2020/4/13 14:59
 * @reviewer
 */
@Slf4j
public class ModelLoader {

    public static void test(ModelDTO model) {
        if (getType(model.getModelType()) == INVALID) {
            throw new InvalidModelTypeException(model.getModelType());
        }
        // TF 模型暂时不需要做校验
        if (getType(model.getModelType()) == TF) {
            return;
        }
        // 校验模型路径是否存在
        List<String> paths = Arrays.asList(model.getHdfsPath().split(","));
        for (String path : paths) {
            checkModelPath(path);
        }
        log.info("加载模型：{}", model.toString());
        // 构建预测字段信息（Java -> Scala）
        scala.collection.immutable.Map<String, String[]> scalaFieldInfoMap = null;
        if (CollectionUtils.isNotEmpty(model.getFields())) {
            Map<String, String[]> fieldInfoMap = new HashMap<>();
            model.getFields().forEach(f -> fieldInfoMap.put(f.getField(), new String[]{f.getDataType(), f.getValue()}));
            scala.collection.mutable.Map<String, String[]> scalaMap = JavaConverters
                    .mapAsScalaMapConverter(fieldInfoMap)
                    .asScala();
            Object obj = Map$.MODULE$.<String, String[]>newBuilder().$plus$plus$eq(scalaMap);
            Object result = ((scala.collection.mutable.Builder) obj).result();
            scalaFieldInfoMap = (scala.collection.immutable.Map) result;
        }

        // 根据模型类型加载构建 Forecast 对象
        Forecast forecast;
        switch (getType(model.getModelType())) {
            case SPARK:
                try {
                    forecast = new SparkForecast(SparkUtil.getLocalSparkSession(), scalaFieldInfoMap,
                            JavaConverters.asScalaIteratorConverter(paths.iterator()).asScala().toSeq());
                } catch (Exception e) {
                    e.printStackTrace();
                    throw new LoadModelException(model);
                }
                break;
            case PMML:
                try {
                    forecast = new PMMLForecast(SparkUtil.getLocalSparkSession(), scalaFieldInfoMap,
                            JavaConverters.asScalaIteratorConverter(paths.iterator()).asScala().toSeq());
                } catch (Exception e) {
                    e.printStackTrace();
                    throw new LoadModelException(model);
                }
                break;
            case MLEAP:
                try {
                    forecast = new MleapForecast(Constants.TRYOUT_MLEAP_THREAD_NUM, scalaFieldInfoMap,
                            JavaConverters.asScalaIteratorConverter(paths.iterator()).asScala().toSeq());
                } catch (Exception e) {
                    e.printStackTrace();
                    throw new LoadModelException(model);
                }
                break;
            default:
                throw new InvalidModelTypeException(model.getModelType());
        }
        // 根据字段信息构建特征，进行请求测试
        if (CollectionUtils.isNotEmpty(model.getFields())) {
            Map<String, Object> features = new HashMap<>();
            for (FieldInfo fieldInfo : model.getFields()) {
                switch (fieldInfo.getDataType()) {
                    case "int":
                        features.put(fieldInfo.getField(), Integer.valueOf(fieldInfo.getValue()));
                        break;
                    case "double":
                        features.put(fieldInfo.getField(), Double.valueOf(fieldInfo.getValue()));
                        break;
                    default:
                        features.put(fieldInfo.getField(), fieldInfo.getValue());
                }
            }
            try {
                forecast.warmup(Collections.singletonList(features));
            } catch (Exception e) {
                log.info("模型预热出错", e);
                throw e;
            }
        }
    }

    /**
     * 检查该模型路径是否存在于 HDFS 上
     *
     * @param hdfsPath
     */
    public static void checkModelPath(String hdfsPath) {
        if (!HdfsUtils.fileExists(hdfsPath)) {
            throw new InvalidHdfsPathException(hdfsPath);
        }
    }
}
