package com.task.core.service.impl;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.annotation.Resource;

import com.task.common.constant.ErrorCodeConstants;
import com.task.common.exception.ErrorCode;
import com.task.common.redis.RedisKeyConstants;
import com.task.common.utils.GeneratorIdUtils;
import com.task.core.domain.DatasourceInfo;
import com.task.core.service.*;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.web.multipart.MultipartFile;

import com.alibaba.fastjson2.JSON;
import com.task.common.core.redis.RedisCache;
import com.task.common.enums.DataFileTypeEnum;
import com.task.common.enums.ImportStatusEnum;
import com.task.common.exception.ServiceException;
import com.task.common.utils.SecurityUtils;
import com.task.common.utils.StringUtils;
import com.task.common.utils.SystemFunctionUtil;
import com.task.common.utils.date.DateConstants;
import com.task.common.utils.date.DateUtil;
import com.task.common.utils.file.FileUtils;
import com.task.common.utils.uuid.UUID;
import com.task.core.domain.DataOperationInfo;
import com.task.core.domain.SysFileInfo;
import com.task.core.domain.SystemSettingInfo;
import com.task.core.domain.info.DataImportProgress;
import com.task.core.domain.info.ExcelFileImportInfo;
import com.task.core.dto.request.DataFileUploadRequestDTO;
import com.task.core.dto.request.SysFileRequestDTO;
import com.task.core.dto.request.SysSettingRequestDTO;
import com.task.core.mapper.DataOperationMapper;

import lombok.extern.slf4j.Slf4j;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

/**
 * Handles upload of SCRIPT / DATA / REFERER files and the asynchronous import of
 * tabular file contents into sharded database tables, tracking per-file import
 * progress and status in Redis hashes keyed by user and file code.
 *
 * @author 迪迦.
 * @date 2024/11/13 13:57
 */
@Slf4j
@Service
public class DataFileOperationServiceImpl implements DataFileOperationService {

  /** Redis hash field: total number of rows parsed from the uploaded file. */
  public final static String DATA_IMPORT_PROGRESS_ALL_COUNT = "dataImportProgressAllCount";

  /** Redis hash field: current import status (an {@code ImportStatusEnum} name). */
  public final static String DATA_IMPORT_STATUS = "dataImportStatus";

  /** Redis hash field: original name of the uploaded file. */
  public final static String DATA_IMPORT_FILE_NAME = "dataImportFileName";

  /** Redis hash field: timestamp when the import started (used for sorting progress). */
  public final static String DATA_IMPORT_FILE_TIME = "dataImportFileTime";

  /** Redis key prefix; full key layout is {@code dataImportKey:<userId>:<fileCode>}. */
  public final static String DATA_IMPORT_KEY = "dataImportKey:";

  /** Redis hash field: number of rows inserted so far. */
  public final static String DATA_IMPORT_PROGRESS_IMPORT_COUNT = "dataImportProgressImportCount";

  /**
   * Number of shard tables rows are distributed across. Used for shard routing
   * ({@code id % SHARD_TABLE_COUNT + 1}), per-batch bucket initialization, and the
   * per-table count summary — previously these three sites used independent literals.
   */
  private final static int SHARD_TABLE_COUNT = 20;

  /** Rows inserted per shard table per batch iteration. */
  private final static int BATCH_SIZE_PER_TABLE = 100;


  @Resource
  private SysFileService fileService;

  @Resource
  private RedisCache redisCache;

  @Resource
  private DataOperationMapper dataOperationMapper;

  @Resource
  private DatasourceService datasourceService;


  @Resource
  private RefererSourceService refererSourceService;

  @Resource
  private DataOperationService dataOperationService;

  @Resource
  private MissionService missionService;

  @Resource
  private SendHistoryService sendHistoryService;

  @Resource
  private SysSettingService sysSettingService;

  @Resource
  private DataPreAvailableService dataPreAvailableService;

  @Resource
  private DataNoticeService dataNoticeService;

  /**
   * Validates and uploads a file, then (for DATA / REFERER types) kicks off an
   * asynchronous import of the file's rows into the corresponding shard tables.
   *
   * @param request upload request carrying the file, its declared type and data-source code
   * @return the persisted file record
   * @throws ServiceException if the data source does not exist, the file type is missing,
   *                          the suffix whitelist is not configured, or the suffix is rejected
   */
  @Override
  public SysFileInfo dataFileUpload(DataFileUploadRequestDTO request)
      throws IOException, URISyntaxException {
    DatasourceInfo dataSource = datasourceService.getDataSourceByCode(request.getDataSourceCode());
    if (ObjectUtils.isEmpty(dataSource)) {
      throw new ServiceException(ErrorCodeConstants.DATA_SOURCE_NOT_EXIST.getMsg());
    }
    String fileName = "";
    if (StringUtils.isEmpty(request.getFileType())) {
      throw new ServiceException("未传入文件类型");
    }
    List<String> sufs = resolveAllowedSuffixes(request.getFileType());
    if (!CollectionUtils.isEmpty(sufs)) {
      sufs = sufs.stream().map(String::toLowerCase).collect(Collectors.toList());
      MultipartFile file = request.getFile();
      if (null == file || file.isEmpty()) {
        throw new ServiceException("上传的文件对象不存在...");
      }
      fileName = file.getOriginalFilename();
      if (org.springframework.util.StringUtils.isEmpty(fileName)) {
        fileName = "newFile";
      }
      // Suffix = text after the last dot; a name without a dot has no suffix.
      String[] split = fileName.split("\\.");
      String suffix = split.length > 1 ? split[split.length - 1] : "";
      if (StringUtils.isEmpty(suffix)) {
        throw new ServiceException("上传的文件格式错误");
      }
      suffix = suffix.toLowerCase();
      if (!sufs.contains(suffix)) {
        throw new ServiceException("不支持的文件格式,需要" + sufs);
      }
      request.setSuffix(suffix);
    }
    SysFileInfo upload = persistUploadedFile(request, fileName);
    return upload;
  }

  /**
   * Looks up the configured suffix whitelist for the given file type.
   *
   * @return the allowed suffixes, or an empty list for unrecognized file types
   * @throws ServiceException if the whitelist setting is absent or blank
   */
  private List<String> resolveAllowedSuffixes(String fileType) {
    String settingKey;
    String missingMsg;
    if (DataFileTypeEnum.SCRIPT.name().equals(fileType)) {
      settingKey = "SCRIPT_FILE_SUFFIX";
      missingMsg = "未配置脚本上传后缀";
    } else if (DataFileTypeEnum.DATA.name().equals(fileType)) {
      settingKey = "DATA_FILE_SUFFIX";
      missingMsg = "未配置Data上传后缀";
    } else if (DataFileTypeEnum.REFERER.name().equals(fileType)) {
      settingKey = "REFERER_FILE_SUFFIX";
      missingMsg = "未配置Referer上传后缀";
    } else {
      return Collections.emptyList();
    }
    SystemSettingInfo setting =
        sysSettingService.getSettingByTypeAndKey(new SysSettingRequestDTO("UPLOAD", settingKey));
    if (ObjectUtils.isEmpty(setting) || StringUtils.isEmpty(setting.getValue())) {
      throw new ServiceException(missingMsg);
    }
    return Arrays.asList(setting.getValue().split(","));
  }

  /**
   * Persists the file record and, for DATA / REFERER uploads, starts the async import.
   * The generated file code is kept so the original file can be traced back later.
   */
  private SysFileInfo persistUploadedFile(DataFileUploadRequestDTO request, String fileName)
      throws IOException, URISyntaxException {
    SysFileRequestDTO fileRequestDTO = new SysFileRequestDTO();
    fileRequestDTO.setFile(request.getFile());
    // For data files the tabular content is also stored in the database;
    // the source file itself is kept for traceability.
    String code = UUID.randomUUID().toString().replace("-", "");
    fileRequestDTO.setCode(code);
    request.setFileName(fileName);
    fileRequestDTO.setFromCode(request.getDataSourceCode());
    SysFileInfo upload = fileService.upload(fileRequestDTO, SecurityUtils.getLoginUser().getUserId());
    request.setOperationBy(SecurityUtils.getLoginUser().getUserId());
    if (DataFileTypeEnum.DATA.name().equals(request.getFileType())) {
      startAsyncImport(request, code, upload.getSource(), SystemFunctionUtil.TABLE_PREFIX_INFO);
      // Post-import notification intentionally disabled:
//          dataNoticeService.sendMessage(request.getDataSourceCode(),dataSource.getDatasourceName()+","+ErrorCodeConstants.DATA_IMPORT_SUCCESS.getMsg());
//          dataNoticeService.sendMessage(request.getDataSourceCode(),dataSource.getDatasourceName()+","+ErrorCodeConstants.DATA_IMPORT_FAIL.getMsg());
    } else if (DataFileTypeEnum.REFERER.name().equals(request.getFileType())) {
      startAsyncImport(request, code, upload.getSource(), SystemFunctionUtil.REFERER_TABLE_PREFIX_INFO);
    }
    return upload;
  }

  /**
   * Runs {@link #importData} on a background thread. Any failure — not just
   * {@link IOException} — marks the import history as FAILED so the record can
   * never be left stuck in IMPORTING.
   *
   * NOTE(review): this reads {@code request.getFile()} (a MultipartFile) after the
   * HTTP request thread may have returned; the servlet container can delete the
   * temp file by then — confirm the container's multipart cleanup settings.
   */
  private void startAsyncImport(DataFileUploadRequestDTO request, String code, String source,
                                String tablePre) {
    new Thread(() -> {
      try {
        importData(request, code, source, tablePre);
      } catch (Exception e) {
        // Persist the failure so the history record does not stay IMPORTING forever.
        dataOperationMapper.updateImportHistory(code, ImportStatusEnum.FAILED.name());
        log.error("数据导入失败, fileCode={}", code, e);
        throw new RuntimeException(e);
      }
    }).start();
  }



  /** Returns the current user's import progress entries, newest first. */
  @Override
  public DataImportProgress getDataImportProgress() {
    return getProgress(DATA_IMPORT_KEY, DATA_IMPORT_FILE_TIME);
  }

  /**
   * Collects per-file progress hashes from Redis for the current user and sorts
   * them by their start-time field, descending.
   *
   * @param key        Redis key prefix ({@code <prefix><userId>:<fileCode>})
   * @param timeFormat hash field holding the start time, formatted with
   *                   {@code DateConstants.DATE_FORMAT}
   */
  public DataImportProgress getProgress(String key, String timeFormat) {
    Collection<String> keys = redisCache.keys(key + SecurityUtils.getUserId() + ":*");
    if (CollectionUtils.isEmpty(keys)) {
      return new DataImportProgress(Collections.emptyList());
    }

    // Key layout is "<prefix>:<userId>:<fileCode>", so index 2 is the file code.
    List<String> fileCodes = keys.stream()
        .map(f -> f.split(":")[2])
        .distinct()
        .collect(Collectors.toList());
    Long userId = SecurityUtils.getUserId();
    DateTimeFormatter dtf = DateTimeFormatter.ofPattern(DateConstants.DATE_FORMAT);
    List<Map<String, Object>> objects = fileCodes.parallelStream()
        .map(fileCode -> {
          Map<String, Object> map = redisCache.getCacheMap(
              key + userId + ":" + fileCode);
          if (map != null) {
            map.put("fileCode", fileCode);
          }
          return map;
        })
        // Drop null maps and entries without the time field; parsing a null/non-string
        // time would throw inside the comparator.
        .filter(map -> map != null && map.get(timeFormat) instanceof String)
        .sorted((o1, o2) -> {
          String time1 = (String) o1.get(timeFormat);
          String time2 = (String) o2.get(timeFormat);
          // Compare as LocalDateTime; reversed operands yield newest-first order.
          LocalDateTime dateTime1 = LocalDateTime.parse(time1, dtf);
          LocalDateTime dateTime2 = LocalDateTime.parse(time2, dtf);
          return dateTime2.compareTo(dateTime1);
        }).collect(Collectors.toList());
    return new DataImportProgress(objects);
  }


  /**
   * Requests a running import to stop by flipping its Redis status to STOP;
   * the import loop polls this field between batches. The key then expires
   * after one minute.
   */
  @Override
  public void stopImport(String fileCode) {
    String redisKey = DATA_IMPORT_KEY + SecurityUtils.getUserId() + ":" + fileCode;
    // Record the stop request where the import loop will see it.
    redisCache.redisTemplate.opsForHash()
        .put(redisKey, DATA_IMPORT_STATUS, ImportStatusEnum.STOP.name());
    redisCache.expire(redisKey, 1, TimeUnit.MINUTES);
  }

  /** Removes the progress hashes for the given file codes from Redis. */
  @Override
  public void clearSuccessRecord(List<String> fileCodes) {
    for (String fileCode : fileCodes) {
      String redisKey = DATA_IMPORT_KEY + SecurityUtils.getUserId() + ":" + fileCode;
      redisCache.deleteObject(redisKey);
    }
  }

  /** Returns the current user's export progress entries, newest first. */
  @Override
  public DataImportProgress getDataExportProgress() {
    return getProgress(DataOperationServiceImpl.DATA_EXPORT_KEY, DataOperationServiceImpl.DATA_EXPORT_FILE_TIME);
  }

  /**
   * Imports the uploaded file's rows into {@code SHARD_TABLE_COUNT} shard tables in
   * batches, publishing progress to Redis and honoring a STOP request between batches.
   * On normal completion updates data-source counts and marks the history END; on a
   * STOP request deletes the already-inserted rows and marks the history STOP.
   *
   * @param request  upload request (suffix, data-source code, operator)
   * @param code     the file's code in the sys file table
   * @param source   stored file location, used for CSV reading
   * @param tablePre shard table name prefix (data vs referer tables)
   */
  private void importData(DataFileUploadRequestDTO request, String code, String source,
                          String tablePre) throws IOException {
    dataOperationMapper.insertImportHistory(request, code, ImportStatusEnum.IMPORTING.name());
    ExcelFileImportInfo dataInfo;
    if ("csv".equals(request.getSuffix())) {
      dataInfo = ExcelReader.readExcelCsv(source);
    } else {
      File file = FileUtils.multipartFileToFile(request.getFile());
      dataInfo = ExcelReader.readExcelXlsx(file);
    }
    List<DataOperationInfo> infos = new ArrayList<>();
    for (Map<String, Object> datum : dataInfo.getData()) {
      infos.add(new DataOperationInfo(UUID.fastUUID().toString().replace("-", ""),
          request.getDataSourceCode(), JSON.toJSONString(datum), code, request.getOperationBy()));
    }
    // Persist the header row so exports can reconstruct column order.
    dataOperationMapper.insertImportDataHead(String.join(",", dataInfo.getHeader()), code,
        request.getDataSourceCode(), request.getOperationBy(), request.getFileName());
    // Insert in batches, distributing rows across the shard tables.
    boolean hasNext = true;
    int index = 0;
    int end = infos.size() - 1;
    String redisKey = DATA_IMPORT_KEY + request.getOperationBy() + ":" + code;
    // Seed the progress hash: total, status, file name, start time.
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_IMPORT_PROGRESS_ALL_COUNT, infos.size());
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_IMPORT_STATUS, ImportStatusEnum.IMPORTING.name());
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_IMPORT_FILE_NAME, request.getFileName());
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_IMPORT_FILE_TIME, DateUtil.nowDateTimeStr());
    redisCache.expire(redisKey, 1, TimeUnit.HOURS);
    while (hasNext) {
      // Honor a STOP request (or an evicted key) between batches.
      Object cacheObject = redisCache.redisTemplate.opsForHash().get(redisKey, DATA_IMPORT_STATUS);
      if (ObjectUtils.isEmpty(cacheObject) || ImportStatusEnum.STOP.name()
          .equals(cacheObject.toString())) {
        break;
      }
      int indexStart = index;
      // One pass covers BATCH_SIZE_PER_TABLE rows per shard table.
      index = indexStart + (BATCH_SIZE_PER_TABLE * SHARD_TABLE_COUNT);
      // One empty bucket per shard table for this batch.
      Map<String, List<DataOperationInfo>> initData = initImportData(SHARD_TABLE_COUNT, tablePre);
      for (int i = indexStart; i < index; i++) {
        if (i > end) {
          hasNext = false;
          break;
        }
        // Route each row to a shard by its snowflake id, roughly evenly.
        long generateId = GeneratorIdUtils.snowflakeId();
        long tableSuf = generateId % SHARD_TABLE_COUNT + 1;
        try {
          infos.get(i).setId(String.valueOf(generateId));
          initData.get(tablePre + tableSuf).add(infos.get(i));
        } catch (Exception e) {
          log.error("导入数据出错,获取分布式id错误", e);
        }
      }
      // Flush each shard bucket to its table and advance the progress counters.
      for (Entry<String, List<DataOperationInfo>> entry : initData.entrySet()) {
        if (!CollectionUtils.isEmpty(entry.getValue())) {
          try {
            dataOperationMapper.insertImportData(entry.getKey(), entry.getValue());

            // Per-shard-table inserted count.
            redisCache.redisTemplate.opsForHash().increment(RedisKeyConstants.DATA_IMPORT_KEY.formatKey(request.getOperationBy(), code),
                    entry.getKey(), entry.getValue().size());
            // Overall inserted count.
            redisCache.redisTemplate.opsForHash()
                .increment(redisKey, DATA_IMPORT_PROGRESS_IMPORT_COUNT, entry.getValue().size());
          } catch (Exception e) {
            log.error("数据插入失败..", e);
          }
        }
      }
    }
    Object cacheObject = redisCache.redisTemplate.opsForHash().get(redisKey, DATA_IMPORT_STATUS);
    if (!ObjectUtils.isEmpty(cacheObject) && ImportStatusEnum.STOP.name()
        .equals(cacheObject.toString())) {
      // Stopped: roll back what was inserted and record the STOP.
      redisCache.deleteObject(redisKey);
      dataOperationMapper.updateImportHistory(code, ImportStatusEnum.STOP.name());
      deleteDataTableByFileCode(code, tablePre, request.getFileType());
      return;
    }
    // Read the final inserted total (increment by 0 is a plain read).
    Long increment = redisCache.redisTemplate.opsForHash()
        .increment(redisKey, DATA_IMPORT_PROGRESS_IMPORT_COUNT, 0);
    Map<String, Long> childTableCountMap = new HashMap<>();
    for (int i = 1; i <= SHARD_TABLE_COUNT; i++) {
      String tempTable = tablePre + i;
      Long childTableCount = redisCache.redisTemplate.opsForHash().increment(RedisKeyConstants.DATA_IMPORT_KEY.formatKey(request.getOperationBy(), code), tempTable, 0);
      childTableCountMap.put(Integer.valueOf(i).toString(), childTableCount);
    }
    Integer intValue = Math.toIntExact(increment);
    if (SystemFunctionUtil.TABLE_PREFIX_INFO.equals(tablePre)) {
      datasourceService.updateDataCount(intValue, request.getDataSourceCode());
     // dataPreAvailableService.initPreAvailable(childTableCountMap,request.getDataSourceCode(),intValue,request.getOperationBy().toString());
      missionService.updateMissionAndSourceAllCountBySourceCode(intValue, request.getDataSourceCode(), DataFileTypeEnum.DATA.name());
    } else if (SystemFunctionUtil.REFERER_TABLE_PREFIX_INFO.equals(tablePre)) {
    //  dataPreAvailableService.initPreAvailable(childTableCountMap,request.getDataSourceCode(),intValue,request.getOperationBy().toString());
      refererSourceService.updateDataCount(intValue, request.getDataSourceCode());
    }
    // Snap the completed count to the total and mark the import finished.
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_IMPORT_PROGRESS_IMPORT_COUNT, infos.size());
    redisCache.redisTemplate.opsForHash()
        .put(redisKey, DATA_IMPORT_STATUS, ImportStatusEnum.END.name());
    redisCache.expire(redisKey, 1, TimeUnit.DAYS);
    dataOperationMapper.updateImportHistory(code, ImportStatusEnum.END.name());

  }

  /** Deletes all imported rows belonging to the given file code (used after a STOP). */
  private void deleteDataTableByFileCode(String code, String tablePre, String dataType) {
    dataOperationService.deleteByFileCode(code, null, dataType);
  }


  /** Builds one empty row bucket per shard table, keyed by full table name. */
  private Map<String, List<DataOperationInfo>> initImportData(int tableCount, String tablePre) {
    Map<String, List<DataOperationInfo>> initData = new HashMap<>();
    for (int i = 1; i < tableCount + 1; i++) {
      initData.put(tablePre + i, new ArrayList<>());
    }
    return initData;
  }

}
