package cn.getech.data.development.service.impl;

import cn.getech.data.development.config.properties.BdpModelConfig;
import cn.getech.data.development.config.properties.HiveConfig;
import cn.getech.data.development.constant.DBTypeEnum;
import cn.getech.data.development.constant.DataDevelopmentBizExceptionEnum;
import cn.getech.data.development.constant.LastRunState;
import cn.getech.data.development.entity.*;
import cn.getech.data.development.mapper.*;
import cn.getech.data.development.model.qo.ModelInfoQO;
import cn.getech.data.development.model.vo.ModelInfoVO;
import cn.getech.data.development.service.ModelInfoService;
import cn.getech.data.development.utils.HdfsUserUtil;
import cn.getech.data.development.utils.HdfsUtil;
import cn.getech.data.development.utils.HiveTableUtil;
import cn.getech.data.intelligence.common.exception.RRException;
import cn.getech.data.intelligence.common.utils.FileUtils;
import cn.getech.data.intelligence.common.utils.PageBean;
import cn.getech.data.intelligence.common.utils.PageUtils;
import cn.getech.data.intelligence.common.utils.QueryBean;
import cn.getech.system.center.utils.ShiroUtils;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang.StringUtils;
import org.jpmml.evaluator.Evaluator;
import org.jpmml.evaluator.InputField;
import org.jpmml.evaluator.TargetField;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.*;

/**
 * <p>
 * 模型表 服务实现类
 * </p>
 *
 * @author hqz
 * @since 2019-08-22
 */
@Service
public class ModelInfoServiceImpl extends ServiceImpl<ModelInfoMapper, ModelInfo> implements ModelInfoService {

    // MyBatis-Plus mapper for the model master table (model_info).
    @Resource
    ModelInfoMapper modelInfoMapper;
    // Mapper for parsed PMML input/output fields (model_details), keyed by model_info_id.
    @Resource
    ModelDetailsMapper modelDetailsMapper;
    // Mapper for model-training jobs; one row per user, keyed by result_table_name.
    @Resource
    ModelTrainMapper modelTrainMapper;
    // Mapper for warehouse table metadata (table_info).
    @Resource
    TableInfoMapper tableInfoMapper;
    // Mapper for per-table field metadata (table_field_info).
    @Resource
    TableFieldInfoMapper tableFieldInfoMapper;

    // HDFS namespace/port and model storage directory configuration.
    @Autowired
    BdpModelConfig bdpModelConfig;
    // Hive JDBC url and training database configuration.
    @Autowired
    HiveConfig hiveConfig;

    // Mapper for analysis tasks linked to a model (used by delete/edit guards).
    @Resource
    private AnalysisInfoMapper analysisInfoMapper;

    // NOTE(review): injected but not referenced in this file — presumably used
    // elsewhere or left over; confirm before removing.
    @Autowired
    private HdfsUserUtil hdfsUserUtil;

    /**
     * Pages through non-deleted models, optionally filtering by a fuzzy name match.
     *
     * @param modelInfoQO paging parameters plus an optional name filter
     * @return a {@link PageUtils} wrapping the matching page of {@link ModelInfo}
     */
    @Override
    public PageUtils queryPage(ModelInfoQO modelInfoQO) {
        String nameFilter = modelInfoQO.getName();
        // The like() condition is only applied when a non-blank name was supplied.
        QueryWrapper<ModelInfo> wrapper = new QueryWrapper<ModelInfo>()
                .like(StringUtils.isNotBlank(nameFilter), "name", nameFilter)
                .eq("is_delete", false);
        IPage<ModelInfo> resultPage = modelInfoMapper.selectPage(
                new QueryBean<ModelInfo, PageBean>().getPage(modelInfoQO), wrapper);
        return new PageUtils(resultPage);
    }

    /**
     * 编辑模型时验证
     *
     * @param modelInfoVO
     */
    /**
     * 编辑模型时验证 — validates the model id and the common name/description
     * fields before an update is applied.
     *
     * @param modelInfoVO the edit payload; must carry an existing model id
     * @throws RRException if the id is missing, the model does not exist,
     *                     or name/description validation fails
     */
    @Override
    public void checkUpdateInfo(ModelInfoVO modelInfoVO) {
        Integer modelId = modelInfoVO.getId();
        // Bug fix: the original used StringUtils.isBlank(String.valueOf(modelId)),
        // but String.valueOf(null) is the literal "null" (not blank), so the
        // missing-id guard could never trigger. Check for null directly.
        if (modelId == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_IN_MYSQLTABLE_NULL.getMessage());
        }
        if (modelInfoMapper.selectById(modelId) == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_NOT_EXIST_IN_MYSQL.getMessage());
        }
        checkUploadInfo(modelInfoVO);
    }

    /**
     * 上传模型文件时验证模型名称和模型描述
     *
     * @param modelInfoVO
     */
    /**
     * 上传模型文件时验证模型名称和模型描述 — rejects a blank model name or
     * blank model description.
     *
     * @param modelInfoVO upload payload carrying name and description
     * @throws RRException when either field is blank
     */
    @Override
    public void checkUploadInfo(ModelInfoVO modelInfoVO) {
        String name = modelInfoVO.getName();
        String description = modelInfoVO.getDecr();
        if (StringUtils.isBlank(name)) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_NAME_NULL.getMessage());
        }
        if (StringUtils.isBlank(description)) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_DESC_NULL.getMessage());
        }
    }


    /**
     * 编辑模型
     *
     * @param file
     * @param modelInfoVO
     */
    /**
     * 编辑模型 — updates the model metadata and, when a new file is supplied,
     * replaces the stored model in HDFS and re-parses its IO fields.
     *
     * Order of operations for a file replacement (preserved from the original):
     * delete old HDFS file — write new HDFS file — delete old model_details —
     * update model_info — re-insert model_details via {@link #getModelIOInfo}.
     *
     * @param file        the replacement model file, or null for a metadata-only edit
     * @param modelInfoVO the edit payload; id assumed already validated by checkUpdateInfo
     * @throws RRException on HDFS config/IO errors or a missing model row
     */
    @Override
    public void updateFile(MultipartFile file, ModelInfoVO modelInfoVO) {
        ModelInfo updated = new ModelInfo();
        BeanUtils.copyProperties(modelInfoVO, updated);
        updated.setModTime(DateUtil.date());
        updated.setModPer(ShiroUtils.getUserId().intValue());
        if (file == null) {
            // Metadata-only edit: no new model file was uploaded.
            modelInfoMapper.updateById(updated);
            return;
        }
        Integer modelInfoId = modelInfoVO.getId(); // validated upstream (checkUpdateInfo)
        String fileName = file.getOriginalFilename();
        // Bug fix: the original called fileName.substring(fileName.lastIndexOf("."))
        // without checking for a missing extension, which throws
        // StringIndexOutOfBoundsException for names like "model".
        if (fileName == null || fileName.lastIndexOf(".") < 0) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_SAVE_ERROR_IN_HDFS.getMessage());
        }
        String hdfsRpcUrl = "hdfs://" + bdpModelConfig.getNamespace() + ":" + bdpModelConfig.getNameport();
        String baseName = fileName.substring(0, fileName.lastIndexOf("."));
        String suffix = fileName.substring(fileName.lastIndexOf(".") + 1);
        // Stored name embeds the model id: "<original>_<id>.<suffix>".
        String finalHdfsUrl = String.format("%s%s/%s", hdfsRpcUrl, bdpModelConfig.getModelconfig(),
                baseName + "_" + modelInfoId + "." + suffix);
        // Simplification: selectById replaces the original selectOne with a
        // redundant StringUtils.isNotEmpty(String.valueOf(id)) condition.
        ModelInfo existing = modelInfoMapper.selectById(modelInfoId);
        if (existing == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_NOT_EXIST_IN_MYSQL.getMessage());
        }
        String primaryHdfsUrl = existing.getHdfsUrl();
        if (primaryHdfsUrl == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_SAVE_ERROR_IN_HDFS.getMessage());
        }
        HdfsUtil hdfsUtil;
        try {
            hdfsUtil = new HdfsUtil(bdpModelConfig);
        } catch (Exception e) {
            throw new RRException(DataDevelopmentBizExceptionEnum.HDFS_CONFIG_ERROR.getMessage());
        }
        try {
            // Only an active model (is_delete == 0) has a live file to remove.
            // Bug fix: null-safe comparison — the original unboxed the Integer
            // flag with "is_delete == 0", which NPEs on a null column value.
            if (Integer.valueOf(0).equals(existing.getIsDelete())) {
                hdfsUtil.delete(primaryHdfsUrl);
            }
            hdfsUtil.writeFile(file.getBytes(), finalHdfsUrl);
        } catch (IOException e) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_SAVE_ERROR_IN_HDFS.getMessage());
        } finally {
            hdfsUtil.close();
        }
        modelDetailsMapper.delete(new QueryWrapper<ModelDetails>().eq("model_info_id", modelInfoId));
        updated.setHdfsUrl(finalHdfsUrl);
        modelInfoMapper.updateById(updated);
        // Re-parse the new PMML file and persist its input/output fields.
        getModelIOInfo(file, modelInfoId);
    }


    /**
     * 上传模型
     *
     * @param file
     * @param modelInfoVO
     */
    /**
     * 上传模型 — inserts the model row, writes the file to HDFS under a name
     * embedding the new model id, then activates the row and parses IO fields.
     *
     * The row is inserted with is_delete=1 first and flipped to 0 only after
     * the HDFS write succeeds, so a failed upload never looks active.
     *
     * @param file        the PMML model file; a null file is silently ignored
     *                    (original behavior)
     * @param modelInfoVO validated name/description payload
     * @throws RRException on HDFS config/IO errors or an unusable file name
     */
    @Override
    public void uploadFile(MultipartFile file, ModelInfoVO modelInfoVO) {
        if (file == null) {
            return; // original behavior: no-op when no file is supplied
        }
        String fileName = file.getOriginalFilename();
        // Bug fix: getOriginalFilename() may return null, and a name without a
        // "." made the original substring calls throw; fail fast instead.
        if (fileName == null || fileName.lastIndexOf(".") < 0) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_SAVE_ERROR_IN_HDFS.getMessage());
        }
        ModelInfo modelInfo = new ModelInfo();
        BeanUtils.copyProperties(modelInfoVO, modelInfo);
        modelInfo.setCreateTime(DateUtil.date());
        modelInfo.setCreatePer(ShiroUtils.getUserId().intValue());
        modelInfo.setModelSuffix("pmml");
        modelInfo.setIsDelete(1); // provisional until the HDFS write succeeds
        modelInfoMapper.insert(modelInfo);
        Integer modelId = modelInfo.getId(); // id generated by the insert above
        String hdfsRpcUrl = "hdfs://" + bdpModelConfig.getNamespace() + ":" + bdpModelConfig.getNameport();
        String baseName = fileName.substring(0, fileName.lastIndexOf("."));
        String suffix = fileName.substring(fileName.lastIndexOf(".") + 1);
        // Stored name embeds the model id: "<original>_<id>.<suffix>".
        String finalHdfsUrl = String.format("%s%s/%s", hdfsRpcUrl, bdpModelConfig.getModelconfig(),
                baseName + "_" + modelId + "." + suffix);
        HdfsUtil hdfsUtil;
        try {
            hdfsUtil = new HdfsUtil(bdpModelConfig);
        } catch (Exception e) {
            throw new RRException(DataDevelopmentBizExceptionEnum.HDFS_CONFIG_ERROR.getMessage() + e.getMessage());
        }
        try {
            hdfsUtil.writeFile(file.getBytes(), finalHdfsUrl);
        } catch (IOException e) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_SAVE_ERROR_IN_HDFS.getMessage());
        } finally {
            hdfsUtil.close();
        }
        // Activate the row now that the file is safely stored.
        ModelInfo activation = new ModelInfo();
        activation.setHdfsUrl(finalHdfsUrl);
        activation.setIsDelete(0);
        modelInfoMapper.update(activation, new QueryWrapper<ModelInfo>().eq("id", modelId));
        // Parse the PMML file and persist its input/output fields.
        getModelIOInfo(file, modelId);
    }

    /**
     * 获取选中的模型文件的输入和输出字段
     *
     * @param id
     * @return
     */
    /**
     * 获取选中的模型文件的输入和输出字段 — returns the model's name, description,
     * original file name, and its parsed input/output field tables.
     *
     * @param id model_info id as a string
     * @return map with keys name/decr/fileName and, when present,
     *         inputTable/outputTable JSON arrays (original behavior: the table
     *         keys are only added when at least one field of that kind exists)
     * @throws RRException when the id is blank, no parsed fields exist,
     *                     or the model row/HDFS url is missing
     */
    @Override
    public Map getModelDetials(String id) {
        if (StringUtils.isBlank(id)) {
            throw new RRException(DataDevelopmentBizExceptionEnum.PARAM_NOT_FOUND.getMessage());
        }
        Integer modelInfoId = Integer.valueOf(id);
        List<ModelDetails> modelDetailsList = modelDetailsMapper.selectList(
                new QueryWrapper<ModelDetails>().eq("model_info_id", modelInfoId));
        // Bug fix: MyBatis-Plus selectList returns an empty list, never null, so the
        // original null-only check could never detect a model with no parsed fields.
        if (modelDetailsList == null || modelDetailsList.isEmpty()) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_SAVE_ERROR_IN_MYSQL.getMessage());
        }
        ModelInfo modelInfo = modelInfoMapper.selectById(modelInfoId);
        // Bug fix: guard before dereferencing — selectById returns null for an
        // unknown id, and hdfsUrl may be null for a failed upload.
        if (modelInfo == null || modelInfo.getHdfsUrl() == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_NOT_EXIST_IN_MYSQL.getMessage());
        }
        String hdfsUrl = modelInfo.getHdfsUrl();
        String fileName = hdfsUrl.substring(hdfsUrl.lastIndexOf("/") + 1);
        // Stored name is "<original>_<id>.<suffix>" (see uploadFile); strip the id back out.
        String primaryFileName = fileName.substring(0, fileName.lastIndexOf("_"))
                + "." + fileName.substring(fileName.lastIndexOf(".") + 1);
        Map<String, Object> result = new HashMap<>();
        result.put("name", modelInfo.getName());
        result.put("decr", modelInfo.getDecr());
        result.put("fileName", primaryFileName);
        JSONArray inputTable = new JSONArray();
        JSONArray outputTable = new JSONArray();
        for (ModelDetails details : modelDetailsList) {
            JSONObject field = new JSONObject();
            if (details.getModelType() == 1) {        // 1 = input field
                field.put("inputName", details.getModelField());
                field.put("inputDataType", details.getModelFieldType());
                inputTable.add(field);
                result.put("inputTable", inputTable);
            } else if (details.getModelType() == 2) { // 2 = output/target field
                field.put("outputName", details.getModelField());
                field.put("outputDataType", details.getModelFieldType());
                outputTable.add(field);
                result.put("outputTable", outputTable);
            }
        }
        return result;
    }


    /**
     * 删除模型文件
     *
     * @param id
     */
    /**
     * 删除模型文件 — removes the model file from HDFS and, on success, soft-deletes
     * the model row (is_delete = 1).
     *
     * @param id model_info id
     * @throws RRException when the id is null, the model row is missing, HDFS
     *                     cannot be configured, or the DB update fails
     */
    @Override
    public void deleteModle(Integer id) {
        if (id == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_IN_MYSQLTABLE_NULL.getMessage());
        }
        ModelInfo modelInfo = modelInfoMapper.selectById(id);
        // Bug fix: the original dereferenced the selectById result without a
        // null check and would NPE for an unknown id.
        if (modelInfo == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_NOT_EXIST_IN_MYSQL.getMessage());
        }
        HdfsUtil hdfsUtil;
        try {
            hdfsUtil = new HdfsUtil(bdpModelConfig);
        } catch (Exception e) {
            // Bug fix: the original only printed the stack trace here and then
            // called delete() on the null hdfsUtil, causing an NPE.
            throw new RRException(DataDevelopmentBizExceptionEnum.HDFS_CONFIG_ERROR.getMessage());
        }
        // try/finally replaces the original's three scattered (and double-closing)
        // close() calls with exactly one guaranteed close.
        try {
            if (hdfsUtil.delete(modelInfo.getHdfsUrl())) {
                try {
                    modelInfo.setIsDelete(1);
                    modelInfoMapper.updateById(modelInfo);
                } catch (Exception e) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.MODEL_DELETE_ERROR.getMessage());
                }
            }
        } finally {
            hdfsUtil.close();
        }
    }


    /**
     * 下载模型文件
     *
     * @param modelInfo
     */
    /**
     * 下载模型文件 — opens the stored model file from HDFS as a stream for the
     * caller to write to the HTTP response.
     *
     * @param modelInfo the model row carrying the HDFS url to read
     * @param response  the servlet response HdfsUtil decorates (headers etc.)
     * @return a buffered stream over the HDFS file; consumed/closed by the caller
     * @throws Exception when HDFS cannot be configured or the file cannot be read
     */
    @Override
    public BufferedInputStream downloadModel(ModelInfo modelInfo, HttpServletResponse response) throws Exception {
        HdfsUtil hdfsUtil = new HdfsUtil(bdpModelConfig);
        // NOTE(review): hdfsUtil is intentionally not closed here — the returned
        // stream is backed by the HDFS connection; confirm the caller closes it.
        // Fix: removed the leftover System.out.println(available()) debug output.
        return hdfsUtil.readFile(modelInfo.getHdfsUrl(), response);
    }


    /**
     * 获取hdfs中各个数据库中下的table名，为前端生成二级菜单数据
     *
     * @return
     */
    /**
     * 获取hdfs中各个数据库中下的table名，为前端生成二级菜单数据 — builds a
     * two-level value/label/children menu: databases from {@link DBTypeEnum},
     * tables discovered under /user/hive/warehouse/&lt;db&gt;.db.
     *
     * @return a JSONArray (declared List) of db nodes with table children
     * @throws Exception when HDFS cannot be configured
     */
    @Override
    public List getDbTableList() throws Exception {
        String warehouseHome = "/user/hive/warehouse/";
        JSONArray menu = new JSONArray();
        HdfsUtil hdfsUtil = new HdfsUtil(bdpModelConfig);
        // Bug fix: the original only closed hdfsUtil on the success path; a
        // throw from dbTableNameList leaked the HDFS connection.
        try {
            for (DBTypeEnum db : DBTypeEnum.values()) {
                String dbName = db.getName();
                JSONObject dbNode = new JSONObject();
                dbNode.put("value", dbName);
                dbNode.put("label", dbName);
                JSONArray children = new JSONArray();
                List<String> tableNames = hdfsUtil.dbTableNameList(warehouseHome + dbName + ".db");
                if (tableNames != null) {
                    for (String tableName : tableNames) {
                        JSONObject tableNode = new JSONObject();
                        tableNode.put("value", tableName);
                        tableNode.put("label", tableName);
                        children.add(tableNode);
                    }
                } else {
                    // Preserve original behavior: a db with no listing gets one empty child.
                    children.add(new JSONObject());
                }
                dbNode.put("children", children);
                menu.add(dbNode);
            }
        } finally {
            hdfsUtil.close();
        }
        return menu;
    }


    /**
     * 从指定数仓的表中获取数据,每个用户最多只能有一个用于训练模型的数据
     *
     * @param sql
     */
    /**
     * 从指定数仓的表中获取数据,每个用户最多只能有一个用于训练模型的数据
     *
     * Records/updates the current user's single training job row (keyed by
     * result_table_name = "data_<userId>") in state RUNNING, then asynchronously
     * materializes the query result (capped at 10000 rows) into a Hive table,
     * updating the row to SUCCESS or FAIL when the Hive work finishes.
     *
     * @param sql the SELECT to materialize; must be non-blank
     * @throws RRException when sql is blank
     */
    @Override
    public void modelTrainData(String sql) {
        if (StringUtils.isBlank(sql)) {
            throw new RRException(DataDevelopmentBizExceptionEnum.PARAM_NOT_FOUND.getMessage());
        }
        Integer userId = ShiroUtils.getUserId().intValue();
        ModelTrain modelTrain = new ModelTrain();
        modelTrain.setSql(sql);
        modelTrain.setCreatePer(userId);
        modelTrain.setCreateTime(DateUtil.date());
        modelTrain.setState(LastRunState.RUNNING.getCode());
        // One training result table per user: "data_<userId>".
        String resultTableName = "data" + "_" + userId.toString();
        modelTrain.setResultTableName(resultTableName);

        // Upsert: first run inserts, later runs overwrite the user's existing job row.
        ModelTrain modelTrain2 = modelTrainMapper.selectOne(new QueryWrapper<ModelTrain>().eq("result_table_name", resultTableName));
        if (modelTrain2 == null) {
            modelTrainMapper.insert(modelTrain);
        } else {
            modelTrainMapper.update(modelTrain, new QueryWrapper<ModelTrain>().eq("result_table_name", resultTableName));
        }
        try {
            // NOTE(review): raw unmanaged thread — nothing joins it or bounds
            // concurrency; consider an executor.
            new Thread(() -> {
                // NOTE(review): hard-coded "hdfs"/"hdfs" credentials — presumably the
                // Hive service user; confirm. HiveTableUtil is never closed here.
                HiveTableUtil hiveTableUtil = new HiveTableUtil(hiveConfig.getPressionUrl(),"hdfs","hdfs");
                try {
                    hiveTableUtil.execute("CREATE DATABASE IF NOT EXISTS " + hiveConfig.getTrainDB());
                    // Drop any previous run's result table before re-creating it.
                    hiveTableUtil.execute(String.format("DROP TABLE IF EXISTS %s.%s ", hiveConfig.getTrainDB(), resultTableName));
                    StringBuffer sb = new StringBuffer();
                    sb.append(sql);
                    sb.append(" ");
                    // Cap the training sample at 10000 rows.
                    sb.append("limit 10000");

                    // CTAS: materialize the capped query into an RCFile-backed table.
                    // NOTE(review): sql is concatenated directly into the CTAS — an
                    // injection risk if callers pass untrusted input; confirm upstream checks.
                    hiveTableUtil.execute(String.format("CREATE TABLE %s.%s\n" +
                            "   ROW FORMAT SERDE \"org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe\"\n" +
                            "   STORED AS RCFile\n" +
                            "   AS\n" +
                            "%s", hiveConfig.getTrainDB(), resultTableName, sb.toString()));
                    // Mark the job row SUCCESS once the Hive work completes.
                    ModelTrain modelTrain1 = modelTrainMapper.selectOne(new QueryWrapper<ModelTrain>().eq("result_table_name", resultTableName));
                    if (modelTrain1 != null) {
                        modelTrain1.setState(LastRunState.SUCCESS.getCode());
                        modelTrain1.setMsg("success");
                        modelTrainMapper.updateById(modelTrain1);
                    }
                } catch (SQLException e) {
                    // On failure, record FAIL plus the driver's message on the job row.
                    ModelTrain modelTrain1 = modelTrainMapper.selectOne(new QueryWrapper<ModelTrain>().eq("result_table_name", resultTableName));
                    if (modelTrain1 != null) {
                        modelTrain1.setState(LastRunState.FAIL.getCode());
                        modelTrain1.setMsg(e.getMessage());
                        modelTrainMapper.updateById(modelTrain1);
                    }
                    // NOTE(review): thrown inside the worker thread, so this never
                    // reaches the HTTP caller — it only kills the thread.
                    throw new RRException(DataDevelopmentBizExceptionEnum.SERVER_ERROR.getMessage());
                }
            }).start();
        } catch (Exception e) {
            // NOTE(review): Thread.start() itself rarely throws; async failures
            // are reported via the job row, not this catch.
            throw new RRException(DataDevelopmentBizExceptionEnum.SERVER_ERROR.getMessage());
        }

    }

    /**
     * 获取当前用户的db和list
     */
    /**
     * 获取当前用户的db和list — builds a db → table cascade for the current user:
     * one node per {@link DBTypeEnum} database, with that user's tables as
     * children (value and tableInfoId both carry the table_info id).
     *
     * @param type currently unused by this implementation
     * @return a JSONArray of label/value/children nodes
     */
    @Override
    public JSONArray dbTableList(Integer type) {
        JSONArray result = new JSONArray();
        for (DBTypeEnum db : DBTypeEnum.values()) {
            String dbName = db.getName();
            JSONObject dbNode = new JSONObject();
            dbNode.put("label", dbName);
            dbNode.put("value", dbName);
            JSONArray children = new JSONArray();
            dbNode.put("children", children);
            result.add(dbNode);
            List<TableInfo> tables = tableInfoMapper.selectList(new QueryWrapper<TableInfo>()
                    .eq("create_per", ShiroUtils.getUserId())
                    .eq("db_name", dbName));
            for (TableInfo table : tables) {
                JSONObject tableNode = new JSONObject();
                tableNode.put("label", table.getTableName());
                tableNode.put("value", table.getId());
                tableNode.put("tableInfoId", table.getId());
                children.add(tableNode);
            }
        }
        return result;
    }


    /**
     * 第二步确认
     *
     * @return
     */
    /**
     * 第二步确认 — builds a three-level tree (db → table → fields) for one
     * selected table, fields ordered by field_order then is_partition.
     *
     * @param tableInfoId the table_info id selected in step one
     * @return a JSONArray holding the single db node
     * @throws RRException when the id is null or no matching table row exists
     */
    @Override
    public JSONArray definiteDbTableTree(Integer tableInfoId) {
        if (tableInfoId == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.PARAM_CANT_NULL.getMessage());
        }
        TableInfo tableInfo = tableInfoMapper.selectById(tableInfoId);
        // Bug fix: selectById returns null for an unknown id; the original then
        // NPE'd on tableInfo.getId() instead of reporting a server error.
        // (Also removed a dead "params" map that was populated but never used.)
        if (tableInfo == null || !tableInfoId.equals(tableInfo.getId())) {
            throw new RRException(DataDevelopmentBizExceptionEnum.SERVER_ERROR.getMessage());
        }
        JSONArray tree = new JSONArray();
        JSONObject dbJson = new JSONObject();
        dbJson.put("label", tableInfo.getDbName());
        JSONArray tableArray = new JSONArray();
        dbJson.put("children", tableArray);
        tree.add(dbJson);
        JSONObject tableObj = new JSONObject();
        tableObj.put("label", tableInfo.getTableName());
        tableArray.add(tableObj);
        JSONArray fieldArray = new JSONArray();
        tableObj.put("children", fieldArray);
        List<TableFieldInfo> tableFieldInfos = tableFieldInfoMapper.selectList(
                new QueryWrapper<TableFieldInfo>().eq("table_id", tableInfoId)
                        .orderByAsc("field_order", "is_partition"));
        for (TableFieldInfo fieldInfo : tableFieldInfos) {
            JSONObject fieldObj = new JSONObject();
            fieldObj.put("label", fieldInfo.getFieldName());
            fieldObj.put("value", fieldInfo.getFieldName());
            fieldArray.add(fieldObj);
        }
        return tree;
    }

    /**
     * 解析PMML文件  用于上传模型文件和更新模型文件时
     *
     * @param file        模型文件
     * @param modelInfoId modelInfo表格的id
     * @return
     * @throws IOException
     */
    /**
     * 解析PMML文件  用于上传模型文件和更新模型文件时 — evaluates the PMML file,
     * persists one model_details row per input (type 1) and target (type 2)
     * field, and returns a summary map.
     *
     * @param file        模型文件 (the PMML model file)
     * @param modelInfoId modelInfo表格的id
     * @return map with "inputTable" and "outputTable" lists of field JSON objects
     */
    public Map getModelIOInfo(MultipartFile file, Integer modelInfoId) {
        Evaluator evaluator = FileUtils.getFileEvaluator(file);
        List<JSONObject> inputTable = new ArrayList<JSONObject>();
        List<JSONObject> outputTable = new ArrayList<JSONObject>();
        // Decomposition: input and target handling shared everything except the
        // JSON key prefix and the model_type code, so it now goes through one helper.
        for (InputField inputField : evaluator.getInputFields()) {
            inputTable.add(saveModelField(modelInfoId, 1, "input",
                    inputField.getName().getValue(),
                    inputField.getDataType(), inputField.getDataType().value()));
        }
        for (TargetField targetField : evaluator.getTargetFields()) {
            outputTable.add(saveModelField(modelInfoId, 2, "output",
                    targetField.getName().getValue(),
                    targetField.getDataType(), targetField.getDataType().value()));
        }
        Map<String, Object> result = new HashMap<>();
        result.put("inputTable", inputTable);
        result.put("outputTable", outputTable);
        return result;
    }

    /**
     * Persists one parsed PMML field as a model_details row and returns its
     * JSON description ("&lt;prefix&gt;Name" / "&lt;prefix&gt;DataType").
     *
     * @param modelInfoId   owning model_info id
     * @param modelType     1 for input fields, 2 for target/output fields
     * @param keyPrefix     "input" or "output" — prefix of the JSON keys
     * @param fieldName     the PMML field name
     * @param dataType      the DataType object stored in the JSON (original behavior)
     * @param dataTypeValue the DataType string value stored in the DB
     */
    private JSONObject saveModelField(Integer modelInfoId, int modelType, String keyPrefix,
                                      String fieldName, Object dataType, String dataTypeValue) {
        JSONObject json = new JSONObject();
        json.put(keyPrefix + "Name", fieldName);
        json.put(keyPrefix + "DataType", dataType);
        ModelDetails details = new ModelDetails();
        details.setModelInfoId(modelInfoId);
        details.setModelType(modelType);
        details.setModelField(fieldName);
        details.setModelFieldType(dataTypeValue);
        modelDetailsMapper.insert(details);
        return json;
    }

    /**
     * Guards model deletion/editing against linked analysis tasks.
     *
     * @param id   model_info id whose linked analysis tasks are checked
     * @param type 1 = delete check (any linked task blocks), 2 = edit check
     *             (only an enabled/running task blocks)
     * @throws RRException when the corresponding guard fails
     */
    @Override
    public void checkedAnalysisInfo(Integer id, int type) {
        List<AnalysisInfo> linked = analysisInfoMapper.selectList(
                new QueryWrapper<AnalysisInfo>().eq("model_info_id", id));
        if (linked == null || linked.isEmpty()) {
            return; // no linked tasks — nothing blocks either operation
        }
        if (type == 1) {
            throw new RRException("有关联的分析任务，则不能删除！");
        }
        if (type == 2) {
            boolean anyRunning = linked.stream()
                    .anyMatch(info -> Objects.equals(true, info.getEnable()));
            if (anyRunning) {
                throw new RRException("有正在运行的分析任务，则不能编辑！");
            }
        }
    }

}
