package com.zhny.project.txt.compute.service.impl;

import com.zhny.algorithm.sql.NaiveBayesSQLUtil;
import com.zhny.algorithm.sql.RandomForestClassificationSQLUtil;
import com.zhny.common.constant.UserConstants;
import com.zhny.common.utils.DateUtils;
import com.zhny.common.utils.StringUtils;
import com.zhny.common.utils.security.ShiroUtils;
import com.zhny.common.utils.text.Convert;
import com.zhny.framework.config.RuoYiConfig;
import com.zhny.project.txt.calculation.domain.TxtCalculation;
import com.zhny.project.txt.calculation.mapper.TxtCalculationMapper;
import com.zhny.project.txt.compute.domain.Compute;
import com.zhny.project.txt.compute.mapper.ComputeMapper;
import com.zhny.project.txt.compute.service.IComputeService;
import com.zhny.project.txt.dataset.domain.TxtDataset;
import com.zhny.project.txt.dataset.mapper.TxtDatasetMapper;
import com.zhny.project.txt.indexset.domain.TxtIndexset;
import com.zhny.project.txt.indexset.mapper.TxtIndexsetMapper;
import com.zhny.project.txt.model.domain.TxtModel;
import com.zhny.project.txt.model.mapper.TxtModelMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;

/**
 * Algorithm computation service implementation (算法计算Service业务层处理).
 *
 * <p>Builds SELECT statements over dynamically named dataset tables, feeds them to the
 * random-forest / naive-Bayes SQL utilities, and records the resulting models and
 * calculation runs.
 *
 * <p>NOTE(review): several methods assemble SQL by concatenating table/column names read
 * from the database. That is only safe while those names are never user-controlled —
 * confirm before exposing new inputs, since MyBatis cannot parameterize identifiers.
 *
 * @author yanqun
 * @date 2019-12-16
 */
@Service
public class ComputeServiceImpl implements IComputeService
{
    @Autowired
    private ComputeMapper computeMapper;
    @Autowired
    private TxtDatasetMapper txtDatasetMapper;
    @Autowired
    private TxtIndexsetMapper txtIndexsetMapper;
    @Autowired
    private TxtModelMapper txtModelMapper;
    // Self-injection: internal calls go through the Spring proxy so that the
    // @Transactional annotations on selectPatterns/selectCalculation still apply.
    @Autowired
    private IComputeService computeService;
    @Autowired
    private TxtCalculationMapper txtCalculationMapper;

    /**
     * Look up a single computation record.
     *
     * @param datasetKey computation ID
     * @return the computation, or {@code null} if not found
     */
    @Override
    public Compute selectComputeById(String datasetKey)
    {
        return computeMapper.selectComputeById(datasetKey);
    }

    /**
     * Query the computation list.
     *
     * @param compute filter criteria
     * @return matching computations
     */
    @Override
    public List<Compute> selectComputeList(Compute compute)
    {
        return computeMapper.selectComputeList(compute);
    }

    /**
     * Insert a new computation, stamping creator, a fresh dataset key and creation time.
     *
     * @param compute record to insert (its datasetKey is overwritten with a new UUID)
     * @return affected row count
     */
    @Override
    public int insertCompute(Compute compute)
    {
        compute.setCreateBy(ShiroUtils.getUserId().toString());
        compute.setDatasetKey(UUID.randomUUID().toString());
        compute.setCreateTime(DateUtils.getNowDate());
        return computeMapper.insertCompute(compute);
    }

    /**
     * Update a computation, stamping modifier and modification time.
     *
     * @param compute record to update
     * @return affected row count
     */
    @Override
    public int updateCompute(Compute compute)
    {
        compute.setUpdateBy(ShiroUtils.getUserId().toString());
        compute.setUpdateTime(DateUtils.getNowDate());
        return computeMapper.updateCompute(compute);
    }

    /**
     * Delete computations by a comma-separated list of IDs.
     *
     * @param ids comma-separated computation IDs
     * @return affected row count
     */
    @Override
    public int deleteComputeByIds(String ids)
    {
        return computeMapper.deleteComputeByIds(Convert.toStrArray(ids));
    }

    /**
     * Delete a single computation.
     *
     * @param datasetKey computation ID
     * @return affected row count
     */
    public int deleteComputeById(String datasetKey)
    {
        return computeMapper.deleteComputeById(datasetKey);
    }

    /**
     * Build the SELECT statement used for model generation.
     *
     * <p>Selects the dataset's configured columns (plus {@code eigan_value} when the
     * dataset's dispose icon is "4") from the dataset's backing table.
     *
     * @param datasetKey dataset ID
     * @return the assembled SQL text
     */
    @Override
    @Transactional
    public String selectPatterns(String datasetKey)
    {
        TxtDataset iconInfo = txtDatasetMapper.selectTabledatasetIco(datasetKey);
        // Backing table name for this dataset.
        TxtDataset txtDataset = txtDatasetMapper.selectTableNameByDatasetKey(datasetKey);
        // Comma-joined column list (safe for an empty index set, unlike substring(0, len - 1)).
        String columns = joinEnglishNames(datasetKey);

        StringBuilder sql = new StringBuilder("SELECT ");
        // Constant-first equals avoids an NPE when disposeIcon is null.
        if ("4".equals(iconInfo.getDisposeIcon()))
        {
            sql.append("eigan_value,");
        }
        sql.append(columns);
        sql.append(" FROM ").append(txtDataset.getSurfaceName());
        return sql.toString();
    }

    /**
     * Build the SELECT statement used for analysis computation.
     *
     * <p>Always selects {@code table_key} plus the dataset's configured columns from the
     * dataset's backing table.
     *
     * @param datasetKey dataset ID
     * @return the assembled SQL text
     */
    @Override
    @Transactional
    public String selectCalculation(String datasetKey)
    {
        // Backing table name for this dataset.
        TxtDataset txtDataset = txtDatasetMapper.selectTableNameByDatasetKey(datasetKey);
        String columns = joinEnglishNames(datasetKey);

        StringBuilder sql = new StringBuilder("SELECT table_key,");
        sql.append(columns);
        sql.append(" FROM ").append(txtDataset.getSurfaceName());
        return sql.toString();
    }

    /**
     * Count of index columns configured for a dataset.
     *
     * @param datasetKey dataset ID
     * @return column count
     */
    public int selectMun(String datasetKey)
    {
        return txtIndexsetMapper.selectMun(datasetKey);
    }

    /**
     * Copy {@code eigan_value} results from an algorithm result table back into the
     * original txt table, matched by {@code table_key}.
     *
     * @param datasetKey  dataset ID (kept for interface compatibility; not used here)
     * @param tableName   target txt table to update
     * @param resultTable algorithm result table to read from
     * @return affected row count of the last update (0 when the result table is empty)
     */
    @Transactional
    public int updateTXT(String datasetKey, String tableName, String resultTable)
    {
        int result = 0;
        String select = "select table_key,eigan_value from " + resultTable;
        List<LinkedHashMap<String, Object>> rows = computeMapper.selectTableKeyEiganValueList(select);
        for (LinkedHashMap<String, Object> row : rows)
        {
            String update = "update " + tableName
                    + " set eigan_value = " + row.get("eigan_value").toString()
                    + " where table_key = '" + row.get("table_key").toString() + "'";
            result = computeMapper.updateTXTtable(update);
        }
        return result;
    }

    /**
     * Train and persist a model for the given dataset.
     *
     * <p>Algorithm "1" runs the random-forest trainer (with its fixed hyper-parameters);
     * anything else runs naive Bayes and is normalized to algorithm ID "2". The trained
     * model file is written under the configured model path, then a model row is inserted.
     *
     * @param txtModel model metadata; key, audit fields and algorithm ID are stamped here
     * @return affected row count of the model insert
     */
    @Override
    public int createModal(TxtModel txtModel)
    {
        String compute = computeService.selectPatterns(txtModel.getDatasetKey());
        int mun = computeService.selectMun(txtModel.getDatasetKey());
        // Model file location under the configured model root.
        String savePath = RuoYiConfig.getModelPath() + "/" + txtModel.getSavePath();

        boolean randomForest = "1".equals(txtModel.getAlgorithmId());
        // Fixed: the original used getDatasetKey().equals(null), which is always false
        // (or throws NPE) per the equals contract, so the guard never worked.
        if (txtModel.getDatasetKey() != null)
        {
            // Wrap the generated SELECT as a derived table for the algorithm utilities.
            String sql = "(" + compute + " WHERE 1 = 1) t";
            if (randomForest)
            {
                RandomForestClassificationSQLUtil.excMySQLSelectPredictModel(sql, savePath,
                        5,      // numClasses
                        32,     // maxBins
                        12345,  // seed
                        6,      // maxDepth
                        3,      // numTrees
                        mun);
            }
            else
            {
                NaiveBayesSQLUtil.excMySQLSelectPredictModel(sql, savePath, mun);
            }
        }

        String userId = ShiroUtils.getUserId().toString();
        txtModel.setAlgorithmId(randomForest ? "1" : "2");
        txtModel.setModelKey(UUID.randomUUID().toString());
        txtModel.setCreateTime(DateUtils.getNowDate());
        txtModel.setCreateBy(userId);
        txtModel.setDeleteFlag(0);
        txtModel.setHandleBy(userId);
        txtModel.setHandleTime(DateUtils.getNowDate());
        return txtModelMapper.insertTxtModel(txtModel);
    }

    /**
     * Validate that the model save path is unique (ignoring the model itself).
     *
     * @param txtModel model carrying the candidate save path
     * @return {@link UserConstants#MESSAGE_UNIQUE} or {@link UserConstants#MESSAGE_NOT_UNIQUE}
     */
    public String checkSavePathUnique(TxtModel txtModel)
    {
        String modelKey = txtModel.getModelKey();
        TxtModel info = txtModelMapper.checkSavePathUnique(txtModel.getSavePath());
        if (StringUtils.isNotNull(info) && !info.getModelKey().equals(modelKey))
        {
            return UserConstants.MESSAGE_NOT_UNIQUE;
        }
        return UserConstants.MESSAGE_UNIQUE;
    }

    /**
     * Validate that the model name is unique (ignoring the model itself).
     *
     * <p>Method name typo ("Nameh") is kept: it matches the mapper/interface contract.
     *
     * @param txtModel model carrying the candidate name
     * @return {@link UserConstants#MESSAGE_UNIQUE} or {@link UserConstants#MESSAGE_NOT_UNIQUE}
     */
    public String checkModelNamehUnique(TxtModel txtModel)
    {
        String modelKey = txtModel.getModelKey();
        TxtModel info = txtModelMapper.checkModelNamehUnique(txtModel.getModelName());
        if (StringUtils.isNotNull(info) && !info.getModelKey().equals(modelKey))
        {
            return UserConstants.MESSAGE_NOT_UNIQUE;
        }
        return UserConstants.MESSAGE_UNIQUE;
    }

    /**
     * Run an analysis computation with a previously trained model.
     *
     * <p>Steps: run the model's algorithm into a temporary result table {@code r_<table>},
     * copy the computed feature values back into the source table, drop the temporary
     * table, insert a calculation record, and flag the dataset as analyzed ("4").
     *
     * <p>NOTE(review): this method performs several writes plus a DDL DROP but is not
     * annotated {@code @Transactional} (DDL would implicitly commit on MySQL anyway) —
     * a failure mid-way leaves partial state; confirm whether that is acceptable.
     *
     * @param txtCalculation calculation request; key and audit fields are stamped here
     * @return affected row count of the calculation insert
     */
    @Override
    public int editAnalyze(TxtCalculation txtCalculation)
    {
        TxtModel txtModel = txtModelMapper.selectAlgorithm(txtCalculation.getModelKey());
        String compute = computeService.selectCalculation(txtCalculation.getDatasetKey());
        int mun = computeService.selectMun(txtCalculation.getDatasetKey());
        // Source table for this dataset.
        TxtDataset txtDataset = txtDatasetMapper.selectTableNameByDatasetKey(txtCalculation.getDatasetKey());
        // Temporary table the algorithm writes its results into.
        String tableName = "r_" + txtDataset.getSurfaceName();
        String savePath = RuoYiConfig.getModelPath() + "/" + txtModel.getSavePath();

        // Fixed: the original used getDatasetKey().equals(null), which is always false
        // (or throws NPE) per the equals contract, so the guard never worked.
        if (txtCalculation.getDatasetKey() != null)
        {
            String sql = "(" + compute + " WHERE 1 = 1) t";
            if ("1".equals(txtModel.getAlgorithmId()))
            {
                // Random forest.
                RandomForestClassificationSQLUtil.excMySQLDataComputeAndSave(savePath, sql, "zhny", tableName, mun);
            }
            else
            {
                // Naive Bayes.
                NaiveBayesSQLUtil.excMySQLDataComputeAndSave(savePath, sql, "zhny", tableName, mun);
            }
        }

        copyFeatureValues(tableName, txtDataset.getSurfaceName());
        dropTmpTable(tableName);
        return insertCalculationAndFlagDataset(txtCalculation, savePath);
    }

    /**
     * Join the dataset's configured English column names with commas.
     * Returns an empty string when no columns are configured.
     */
    private String joinEnglishNames(String datasetKey)
    {
        List<TxtIndexset> indexList = txtIndexsetMapper.selectSyllable(datasetKey);
        StringBuilder columns = new StringBuilder();
        for (TxtIndexset index : indexList)
        {
            if (columns.length() > 0)
            {
                columns.append(',');
            }
            columns.append(index.getEnglishName());
        }
        return columns.toString();
    }

    /**
     * Copy the computed {@code feature} values from the algorithm result table into the
     * source table's {@code eigan_value} column, matched by {@code table_key}/{@code data}.
     */
    private void copyFeatureValues(String resultTable, String targetTable)
    {
        String select = "SELECT data,feature from " + resultTable;
        List<LinkedHashMap<String, Object>> rows = computeMapper.selectTableKeyEiganValueList(select);
        for (LinkedHashMap<String, Object> row : rows)
        {
            String update = "update " + targetTable
                    + " set eigan_value = " + row.get("feature").toString()
                    + " where table_key = '" + row.get("data").toString() + "'";
            computeMapper.updateTXTtable(update);
        }
    }

    /** Drop the temporary computation result table. */
    private void dropTmpTable(String tableName)
    {
        computeMapper.deleteTmpTable("DROP TABLE " + tableName);
    }

    /**
     * Insert the calculation record (stamping key and audit fields) and mark the dataset
     * as analyzed: dispose icon "4", analyst and update audit fields refreshed.
     *
     * @return affected row count of the calculation insert
     */
    private int insertCalculationAndFlagDataset(TxtCalculation txtCalculation, String savePath)
    {
        String userId = ShiroUtils.getUserId().toString();
        txtCalculation.setDeleteFlag(0);
        txtCalculation.setSavePath(savePath);
        txtCalculation.setCalculationKey(UUID.randomUUID().toString());
        txtCalculation.setCreateBy(userId);
        txtCalculation.setCreateTime(DateUtils.getNowDate());
        txtCalculation.setHandleBy(userId);
        txtCalculation.setHandleTime(DateUtils.getNowDate());
        int result = txtCalculationMapper.insertTxtCalculation(txtCalculation);

        TxtDataset datasetFlag = new TxtDataset();
        datasetFlag.setDisposeIcon("4");
        datasetFlag.setAnalystBy(userId);
        datasetFlag.setUpdateTime(DateUtils.getNowDate());
        datasetFlag.setUpdateBy(userId);
        datasetFlag.setDatasetKey(txtCalculation.getDatasetKey());
        txtDatasetMapper.updateTxtDataset(datasetFlag);
        return result;
    }

}
