package com.sui.bigdata.sml.web.service.impl;

import com.feidee.fd.sml.algorithm.forecast.SparkForecast;
import com.sui.bigdata.sml.web.controller.ModelController;
import com.sui.bigdata.sml.web.dto.ConvertModelDTO;
import com.sui.bigdata.sml.web.dto.ModelDTO;
import com.sui.bigdata.sml.web.exception.EntityNotFoundException;
import com.sui.bigdata.sml.web.exception.InvalidModelTypeException;
import com.sui.bigdata.sml.web.exception.InvalidTokenException;
import com.sui.bigdata.sml.web.repository.mapper.ModelMapper;
import com.sui.bigdata.sml.web.repository.model.FieldInfo;
import com.sui.bigdata.sml.web.repository.model.Model;
import com.sui.bigdata.sml.web.service.ModelService;
import com.sui.bigdata.sml.web.util.Constants;
import com.sui.bigdata.sml.web.util.ModelLoader;
import com.sui.bigdata.sml.web.util.SparkUtil;
import com.sui.bigdata.sml.web.util.Tools;
import lombok.extern.slf4j.Slf4j;
import ma.glasnost.orika.MapperFacade;
import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.DependsOn;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import scala.collection.JavaConverters;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static com.sui.bigdata.sml.web.util.ModelType.*;

/**
 * Model management service: model registration and versioning, version
 * switching, Spark model conversion (PMML / MLeap bundle) and deletion.
 *
 * @author songhaicheng
 * @date 2019/12/17 10:44
 * @description Model CRUD, version switching and Spark model conversion.
 * @reviewer
 */
@Service
@DependsOn("disconfConfig")
@Slf4j
public class ModelServiceImpl implements ModelService {
    private static final Logger LOGGER = LoggerFactory.getLogger(ModelServiceImpl.class);
    @Autowired
    private ModelMapper mapper;
    @Autowired
    private MapperFacade mapperFacade;

    @Transactional
    @Override
    public Long add(ModelDTO dto) {
        if (dto.getVersion() != null) {
            // 新增 model，自动生成模型 token
            dto.setToken(Tools.MD5(dto.getHdfsPath()).substring(8, 24));
            // 路径相同的模型
            List<Model> exists = mapper.listByTokenWithVersionAndOnline(dto.getToken(), null, null);
            if (exists.size() > 0 && exists.get(exists.size() - 1).getVersion() == 1) {
                throw new RuntimeException("检测已有版本号为 1 的模型使用了该路径：" + dto.getHdfsPath());
            }
        } else {
            List<Model> oldModels = mapper.listByTokenWithVersionAndOnline(dto.getToken(), null, null);
            if (oldModels.size() == 0) {
                throw new InvalidTokenException(dto.getToken());
            }
            // 下线所有老的模型
            oldModels.parallelStream().forEach(model -> model.setOnline(false));
            mapper.updateBatch(oldModels);
            log.info("老模型更新成功，数量：{}", oldModels.size());
            if (CollectionUtils.isEmpty(dto.getFields())) {
                // 如果没传字段信息，尝试拿老的最新版本的字段信息填充进来
                Model latestOld = oldModels.get(0);
                // 预防 field_infos 列出现 null 或者空字符串的情况
                if (latestOld.getFieldInfos() == null || "".equals(latestOld.getFieldInfos())) {
                    // 没字段信息时，field_infos 列插入 "[]" 字符串
                    dto.setFields(new ArrayList<>());
                } else {
                    List<FieldInfo> fieldInfos = mapperFacade.map(latestOld, ModelDTO.class).getFields();
                    dto.setFields(fieldInfos);
                }
            }
            // 拿到老的最后一个 version
            // 更新 model，版本号在原来已有的最新版本号上 + 1
            int version = oldModels.get(0).getVersion() + 1;
            dto.setVersion(version);
        }
        // 校验模型是否可用
        ModelLoader.test(dto);
        // 默认使用
        dto.setOnline(true);
        Model model = mapperFacade.map(dto, Model.class);
        // 给 userId 占位
        model.setUserId(Constants.DEFAULT_USER_ID);
        mapper.insert(model);
        if (CollectionUtils.isNotEmpty(dto.getFields())) {
            mapper.insertFieldInfo(dto.getToken(), dto.getVersion(), dto.getFields());
        }
        return model.getId();
    }

    @Override
    public ModelDTO get(Long id) {
        Model model = mapper.get(id);
        if (model == null) {
            throw new EntityNotFoundException(Model.class, id);
        }
        return mapperFacade.map(model, ModelDTO.class);
    }

    @Override
    public List<ModelDTO> listByUserId(Long userId) {
        return mapperFacade.mapAsList(mapper.listByUserId(userId), ModelDTO.class);
    }

    @Override
    public List<ModelDTO> listByToken(String token) {
        List<Model> models = mapper.listByTokenWithVersionAndOnline(token, null, null);
        if (models.size() == 0) {
            throw new InvalidTokenException(token);
        }
        return mapperFacade.mapAsList(models, ModelDTO.class);
    }

    @Transactional
    @Override
    public Boolean update(ModelDTO dto) {
        get(dto.getId());
        Model model = mapperFacade.map(dto, Model.class);
        return mapper.updateBatch(Collections.singletonList(model)) == 1;
    }

    @Transactional
    @Override
    public Boolean switchVersion(String token, Integer version) {
        List<Model> models = mapper.listByTokenWithVersionAndOnline(token, null, null);
        if (models.size() == 0) {
            throw new InvalidTokenException(token);
        }
        for (Model model : models) {
            if (model.getVersion().equals(version)) {
                ModelLoader.test(mapperFacade.map(model, ModelDTO.class));
                model.setOnline(true);
            } else {
                model.setOnline(false);
            }
        }
        mapper.updateBatch(models);
        return true;
    }

    @Override
    public Boolean convertSparkModels(ConvertModelDTO convertDTO) {
        String sources = String.join(", ", convertDTO.getSources());
        boolean success = false;
        log.info("Converting {} model at {} to {}", convertDTO.getType(), sources, convertDTO.getDestination());
        // 检测模型路径
        convertDTO.getSources().parallelStream().forEach(ModelLoader::checkModelPath);
        SparkForecast forecast = null;
        try {
            // 加载模型
            forecast = new SparkForecast(SparkUtil.getLocalSparkSession(), null,
                    JavaConverters.asScalaIteratorConverter(convertDTO.getSources().iterator()).asScala().toSeq());
        } catch (Exception e) {
            LOGGER.info("加载模型出错:{}",e);
        }

        log.info("加载模型 [{}] 成功，准备转换成 {} 模型到 {}", sources, convertDTO.getType(), convertDTO.getDestination());
        switch (getType(convertDTO.getType())) {
            case PMML:
                // 调用转换 PMML 模型方法
                try {
                    success = forecast.convertSparkToPMML(convertDTO.getDestination());
                } catch (Throwable e) {
                    log.info("pmml model convert error:", e);
                }
                break;
            case MLEAP:
                // 调用转换 MLEAP 模型方法
                try {
                    log.info("starting convert");
                    success = forecast.convertSparkToBoundle(convertDTO.getDestination());
                    log.info("ending convert");
                } catch (Throwable e) {
                    log.info("mleap model convert error:", e);
                }
                break;
            default:
                throw new InvalidModelTypeException(convertDTO.getType());
        }
        log.info("result:{}", success);
        if (success) {
            log.info("生成 {} 模型成功：{}", convertDTO.getType(), convertDTO.getDestination());
        } else {
            log.info("生成 {} 模型失败：{}", convertDTO.getType(), convertDTO.getDestination());
        }
        return success;
    }

    @Override
    public Boolean delete(String token) {
        List<Model> exists = mapper.listByTokenWithVersionAndOnline(token, null, null);
        if (exists.size() == 0) {
            throw new InvalidTokenException(token);
        }
        int sum = 0;
        for (Model model : exists) {
            sum += mapper.delete(model.getId());
        }
        if (sum < exists.size()) {
            log.error("模型未删除干净，预期删除 {} 条数据，实际删除 {} 条数据", exists.size(), sum);
        }
        boolean success = sum == exists.size();
        if (success) {
            log.warn("模型数据删除成功，token：{}，数量：{}", token, sum);
        } else {
            log.error("模型数据删除失败，token：{}", token);
        }
        return success;
    }

}
