package com.jhhc.StormSurgeForecast.Calculate.Service.Impl;

import com.jhhc.FloodForecast.Sensor.Dao.BDMS_ST_SENSOR_BMapper;
import com.jhhc.FloodForecast.Sensor.Pojo.BDMS_ST_SENSOR_B;
import com.jhhc.RHDB.Data.Dao.ST_TIDE_RMapper;
import com.jhhc.RHDB.Data.Pojo.ST_TIDE_R;
import com.jhhc.StormSurgeForecast.AstronomicalTide.Dao.BDMS_CALCULATESCHEME_M1Mapper;
import com.jhhc.StormSurgeForecast.AstronomicalTide.Dao.BDMS_FBC_RESULT_PMapper;
import com.jhhc.StormSurgeForecast.AstronomicalTide.Pojo.BDMS_FBC_RESULT_P;
import com.jhhc.StormSurgeForecast.AstronomicalTide.Util.ModelFileRead;
import com.jhhc.StormSurgeForecast.Calculate.BO.CenterUrl;
import com.jhhc.StormSurgeForecast.Calculate.Service.ModelService;
import com.jhhc.StormSurgeForecast.Calculate.util.CalUtil;
import com.jhhc.StormSurgeForecast.Calculate.util.DosUtil;
import com.jhhc.StormSurgeForecast.Calculate.util.DownloadUtil;
import com.jhhc.StormSurgeForecast.Calculate.util.ModelUtil;
import com.jhhc.StormSurgeForecast.StormSurge.Pojo.FBC.BDMS_FBC_OCEANMODEL_B;
import com.jhhc.utils.JsonResult;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * Asynchronous storm-surge model orchestration: polls HPC jobs until they
 * complete, downloads and parses the model output files, and persists the
 * per-station forecast results, updating the calculation-scheme status.
 *
 * @author LYQ
 */
@Service
@RequiredArgsConstructor
public class ModelServiceImpl implements ModelService {

    private final BDMS_CALCULATESCHEME_M1Mapper m1Mapper;

    private final BDMS_FBC_RESULT_PMapper pMapper;

    private final ST_TIDE_RMapper rMapper;

    private final BDMS_ST_SENSOR_BMapper bMapper;

    private final RestTemplate restTemplate;

    /** HPC job status code meaning "completed" (as returned by CalUtil.queryJobs). */
    private static final String JOB_COMPLETED = "statC";

    /** Calculation-scheme status written via m1Mapper: running. */
    private static final String STATUS_RUNNING = "1";
    /** Calculation-scheme status written via m1Mapper: failed (result file missing / unparsable). */
    private static final String STATUS_FAILED = "2";
    /** Calculation-scheme status written via m1Mapper: finished. */
    private static final String STATUS_FINISHED = "3";

    /**
     * Polls the HPC centre every 60 s until job {@code jobId} reaches the
     * completed state, then downloads the result files produced by the given
     * model, parses them into station records and stores them.
     *
     * @param calSchemeId id of the calculation scheme being computed
     * @param centerUrl   holder of the HPC and e-file endpoint urls
     * @param token       auth token for the HPC centre
     * @param jobId       id of the submitted HPC job
     * @param localPath   local directory the result files are downloaded into
     * @param linuxPath   remote (HPC) directory holding the job output
     * @param modelId     identifier of the ocean model that produced the output
     */
    @Async(value = "asyncServiceExecutor")
    @Override
    public void queryByStatus(String calSchemeId, CenterUrl centerUrl, String token, String jobId,
                              String localPath, String linuxPath, String modelId) {
        CalUtil calUtil = new CalUtil();
        linuxPath = linuxPath + "/";
        String jobStatus = calUtil.queryJobs(centerUrl.getHpcUrl(), token, jobId);
        while (!Objects.equals(jobStatus, JOB_COMPLETED)) {
            try {
                TimeUnit.SECONDS.sleep(60);
                jobStatus = calUtil.queryJobs(centerUrl.getHpcUrl(), token, jobId);
                m1Mapper.updateStatus(STATUS_RUNNING, calSchemeId);
            } catch (InterruptedException e) {
                // BUGFIX: the original swallowed the interrupt (printStackTrace only)
                // and kept polling forever. Restore the interrupt flag and abandon
                // the poll so the executor can shut the worker down cleanly.
                Thread.currentThread().interrupt();
                System.out.println("异步线程异常");
                return;
            }
        }
        // Job completed — fetch and persist the model output.
        DownloadUtil downloadUtil = new DownloadUtil();
        ModelFileRead modelFileRead = new ModelFileRead();
        List<BDMS_FBC_RESULT_P> pList;
        switch (modelId) {
            case "HHUCCHAOSUANNS": // super-computer models
            case "HHUCCHAOSUAN":
            case "JSCoastSurge":
            case "JSChangJiang":
                if (resultsAlreadyStored(calSchemeId)) {
                    break;
                }
                boolean exist = calUtil.fileExist(centerUrl.getEfileUrl(),
                        token, linuxPath + "fort.61");
                pList = new ArrayList<>();
                if (exist) {
                    downloadUtil.downloadGwpstas(centerUrl.getEfileUrl(), token,
                            linuxPath, localPath);
                    // download the fort.61 station output file
                    downloadUtil.downloadFort61(centerUrl.getEfileUrl(), token,
                            linuxPath, localPath);
                    pList = modelFileRead.insertReadFors61(localPath, calSchemeId,
                            ModelUtil.modelType(modelId));
                    m1Mapper.updateStatus(STATUS_FINISHED, calSchemeId);
                } else {
                    m1Mapper.updateStatus(STATUS_FAILED, calSchemeId);
                }
                if (pList != null && !pList.isEmpty()) {
                    pMapper.batchInsert(pList); // persist station records
                } else {
                    System.out.println("下载文件失败");
                }
                if ("HHUCCHAOSUAN".equals(modelId)) {
                    // Trigger domain-wide result aggregation for the Zhejiang coast.
                    // (Original bound the response to an unused local; dropped.)
                    Map<String, Object> map = new HashMap<>();
                    map.put("calSchemeId", calSchemeId);
                    map.put("areaName", "浙江沿海");
                    restTemplate.exchange(
                            "http://localhost:2111/StormSurgeForcast/Calculate/Forecast/SaveCalResultByDomain?areaName={areaName}" +
                                    "&calSchemeId={calSchemeId}", HttpMethod.GET, null, JsonResult.class, map);
                }
                break;
            case "HoHaiSouthSeaZs": // South China Sea model
            case "HoHaiEastSeaZS":  // East China Sea model
                if (resultsAlreadyStored(calSchemeId)) {
                    break;
                }
                downloadUtil.downloadGwpstas(centerUrl.getEfileUrl(), token,
                        linuxPath, localPath);
                // download the tfupresuls.dat result file
                downloadUtil.downloadTfupresuls(centerUrl.getEfileUrl(), token,
                        linuxPath, localPath);
                pList = modelFileRead.readtfupresuls(localPath, calSchemeId,
                        ModelUtil.modelType(modelId));
                persistOrFail(pList, calSchemeId);
                break;
            case "HoHaiRevise": // tide-correction model
                if (resultsAlreadyStored(calSchemeId)) {
                    break;
                }
                String tidePath = downloadUtil.downloadTideOutput(centerUrl.getEfileUrl(), token,
                        linuxPath, localPath);
                // ROBUSTNESS: original called batchInsert with no null guard and
                // would NPE if the parse failed; now mirrors the sea-model cases.
                persistOrFail(modelFileRead.modelJiaoZheng(tidePath, calSchemeId, 5), calSchemeId);
                break;
            case "HoHaiSmartZs":
                if (resultsAlreadyStored(calSchemeId)) {
                    break;
                }
                String stormtidePath = downloadUtil.downloadStormtideOutput(centerUrl.getEfileUrl(), token,
                        linuxPath, localPath);
                // ROBUSTNESS: same missing null guard as HoHaiRevise; fixed.
                persistOrFail(modelFileRead.modelJiaoZheng(stormtidePath, calSchemeId, 4), calSchemeId);
                break;
            default:
                break;
        }
    }

    /**
     * Runs the model locally via its shell script, then parses the fort.61
     * output and persists the station results.
     *
     * @param modelPathn local model working directory (also the parse source)
     * @param uuid       calculation-scheme id the results belong to
     * @param modelId    identifier of the model, used to resolve the model type
     */
    @Async(value = "asyncServiceExecutor")
    @Override
    public void queryByStatus2(String modelPathn, String uuid, String modelId) {
        // Run the model computation script; blocks until the command finishes.
        DosUtil.execute2(modelPathn);
        ModelFileRead modelFileRead = new ModelFileRead();
        List<BDMS_FBC_RESULT_P> pList = modelFileRead.insertReadFors61(modelPathn, uuid, ModelUtil.modelType(modelId));
        m1Mapper.updateStatus(STATUS_FINISHED, uuid);
        if (pList != null && !pList.isEmpty()) {
            pMapper.batchInsert(pList); // persist station records
        }
    }

    /**
     * Loads observed tide records for a project's two reference stations and
     * remaps their data codes to the model station ids 1110 and 1919.
     *
     * @param projectId project id (numeric string)
     * @param start     query window start
     * @param end       query window end
     * @return tide records relabelled with model station ids
     */
    @Override
    public List<ST_TIDE_R> createTide(String projectId, String start, String end) {
        // Seed sensor codes: 70703400 -> station 1110 (datacode 0106.0003.00),
        //                    70301400 -> station 1919 (datacode 0104.0007.00).
        List<String> stcd = new ArrayList<>();
        stcd.add("70703400");
        stcd.add("70301400");
        List<BDMS_ST_SENSOR_B> sensors = bMapper.tide(Integer.parseInt(projectId), stcd);
        stcd.clear();
        for (BDMS_ST_SENSOR_B sensor : sensors) {
            stcd.add(sensor.getDatacode());
        }
        List<ST_TIDE_R> rList = rMapper.tide_output(start, end, stcd);
        List<ST_TIDE_R> result = relabel(rList, "0106.0003.00", "1110");
        result.addAll(relabel(rList, "0104.0007.00", "1919"));
        return result;
    }

    /**
     * Checks whether result records for the scheme already exist, in which case
     * the download/parse work is skipped.
     *
     * @return true when results are already stored
     */
    private boolean resultsAlreadyStored(String calSchemeId) {
        List<BDMS_FBC_RESULT_P> ps = pMapper.queyByCcschemeId(calSchemeId);
        if (ps != null && !ps.isEmpty()) {
            System.out.println("数据已存在");
            return true;
        }
        return false;
    }

    /**
     * Persists the parsed station records and marks the scheme finished, or —
     * when parsing yielded nothing — logs the failure and marks it failed.
     */
    private void persistOrFail(List<BDMS_FBC_RESULT_P> pList, String calSchemeId) {
        if (pList != null) {
            pMapper.batchInsert(pList); // persist station records
            m1Mapper.updateStatus(STATUS_FINISHED, calSchemeId);
        } else {
            System.out.println("下载文件失败");
            m1Mapper.updateStatus(STATUS_FAILED, calSchemeId);
        }
    }

    /** Filters {@code rows} to those matching {@code datacode} and rewrites their stcd to {@code newStcd}. */
    private static List<ST_TIDE_R> relabel(List<ST_TIDE_R> rows, String datacode, String newStcd) {
        List<ST_TIDE_R> matched = rows.stream()
                .filter(r -> datacode.equals(r.getStcd()))
                .collect(Collectors.toList());
        matched.forEach(r -> r.setStcd(newStcd));
        return matched;
    }
}
