package com.task.core.service.impl;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Resource;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.ObjectUtils;

import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.github.pagehelper.PageHelper;
import com.task.common.core.redis.RedisCache;
import com.task.common.enums.DataFileTypeEnum;
import com.task.common.enums.DuplicateConditionEnum;
import com.task.common.enums.ImportStatusEnum;
import com.task.common.exception.ServiceException;
import com.task.common.utils.SecurityUtils;
import com.task.common.utils.StringUtils;
import com.task.common.utils.SystemFunctionUtil;
import com.task.common.utils.constants.RedisConstants;
import com.task.common.utils.date.DateConstants;
import com.task.common.utils.date.DateUtil;
import com.task.core.domain.DataImportHeadInfo;
import com.task.core.domain.DataOperationInfo;
import com.task.core.domain.DatasourceInfo;
import com.task.core.domain.DeviceInfo;
import com.task.core.domain.ExportFileInfo;
import com.task.core.domain.MissionInfo;
import com.task.core.domain.RefererSourceInfo;
import com.task.core.domain.SysFileInfo;
import com.task.core.domain.info.DataHistoryImportInfo;
import com.task.core.domain.info.MissionSuccessRedisMap;
import com.task.core.domain.info.SourceConfig;
import com.task.core.dto.request.ClientMissionCompleteRequestDTO;
import com.task.core.dto.request.DataOperationRequestDTO;
import com.task.core.dto.request.ExportFileRequestDTO;
import com.task.core.dto.response.DataSearchResponseDTO;
import com.task.core.mapper.DataOperationMapper;
import com.task.core.mapper.SysFileMapper;
import com.task.core.service.DataOperationService;
import com.task.core.service.DatasourceService;
import com.task.core.service.RefererSourceService;
import com.task.core.service.SendHistoryService;
import com.task.core.task.into.DataIncrementInfo;

import lombok.extern.slf4j.Slf4j;

/**
 * @author 迪迦.
 * @date 2024/11/11 13:51
 */
@Slf4j
@Service
public class DataOperationServiceImpl implements DataOperationService {

  /** Redis hash field: total row count of the running export. */
  public static final String DATA_EXPORT_PROGRESS_ALL_COUNT = "dataExportProgressAllCount";

  /** Redis hash field: current export status ({@link ImportStatusEnum} name). */
  public static final String DATA_EXPORT_STATUS = "dataExportStatus";

  /** Redis hash field: generated export file name. */
  public static final String DATA_EXPORT_FILE_NAME = "dataExportFileName";

  /** Redis hash field: timestamp at which the export started. */
  public static final String DATA_EXPORT_FILE_TIME = "dataExportFileTime";

  /** Redis key prefix for export progress hashes: {@code dataExportKey:<operator>:<fileCode>}. */
  public static final String DATA_EXPORT_KEY = "dataExportKey:";

  /** Redis hash field: number of rows already written to the export file. */
  public static final String DATA_EXPORT_PROGRESS_IMPORT_COUNT = "dataExportProgressImportCount";

  /** Redis list buffering successful mission completions for batched DB updates. */
  public static final String MISSION_SUCCESS_RETURN_KEY = "missionSuccessReturnKey";

  /** Redis list buffering failed mission completions for batched DB updates. */
  public static final String MISSION_FAILED_RETURN_KEY = "missionFailedReturnKey";

  /** Rows fetched per query while streaming data into an export file. */
  private static final int EXPORT_PAGE_SIZE = 10000;

  @Resource
  private Environment environment;

  @Resource
  private DataOperationMapper dataOperationMapper;

  @Resource
  private DatasourceService datasourceService;

  @Resource
  private RefererSourceService refererSourceService;

  @Resource
  private RedisCache redisCache;

  @Resource
  private SysFileMapper sysFileMapper;

  @Resource
  private SendHistoryService sendHistoryService;

  /**
   * Fetches up to {@code limit} not-yet-used rows, from the data tables when
   * {@code dataType} is {@code DATA}, otherwise from the referer tables.
   */
  @Override
  public List<DataOperationInfo> getNotUseData(int limit,
      List<String> dataSourceCodeList, String dataType) {
    if (DataFileTypeEnum.DATA.name().equals(dataType)) {
      return dataOperationMapper.getNotUseData(limit, dataSourceCodeList);
    }
    return dataOperationMapper.getNotUseRefererData(limit, dataSourceCodeList);
  }

  /**
   * Loads a single row by id from the given shard table.
   *
   * <p>Callers are expected to supply {@code tableName}; {@code dataType} is currently unused
   * here but kept for interface compatibility.
   */
  @Override
  public DataOperationInfo getDataById(String dataId, String tableName, String dataType, int tNum) {
    return dataOperationMapper.getDataById(dataId, tableName, tNum);
  }

  /**
   * Marks a data row as used or releases it.
   *
   * <p>When {@code status} is true the (table, code) pair is pushed onto a redis list so a
   * background consumer can apply use-count increments in bulk; when false the use count is
   * decremented synchronously. {@code sourceCode} and {@code dataType} are currently unused
   * but retained for interface compatibility.
   */
  @Override
  public void changeDataStatusById(String code, boolean status, String tableName, String sourceCode,
      String dataType) {
    if (status) {
      // Deferred increment: drained and applied in batches elsewhere.
      redisCache.redisTemplate.opsForList()
          .rightPush(RedisConstants.DATA_SEND_INCREMENT_KEY, new DataIncrementInfo(tableName, code));
    } else {
      dataOperationMapper.decrementUseCount(code, tableName);
    }
  }

  /** Resets a row to unused: resolves its shard table, then decrements its use count. */
  @Override
  public void initDataStatus(String dataId, String dataType, String sourceCode) {
    String tableNameById = SystemFunctionUtil.getTableNameById(dataId, dataType);
    changeDataStatusById(dataId, false, tableNameById, sourceCode, dataType);
  }

  /**
   * Pages data relative to the previously fetched page.
   *
   * <p>Each of the 10 shard tables contributes {@code pageSize / 10} rows, and the start
   * offset is derived from the distance between the last and the requested page number.
   */
  @Override
  public DataSearchResponseDTO getDataByPage(DataOperationRequestDTO request) {
    // The effective offset is the gap between the requested and the last-seen page.
    int pageSize = request.getPageSize() / 10;
    int startIndex;
    if (request.getLastPageNum() < request.getPageNum()) {
      startIndex = ((request.getPageNum() - request.getLastPageNum()) - 1) * pageSize;
    } else {
      // NOTE(review): backward paging mirrors the forward formula — confirm this offset is intended.
      startIndex = ((request.getLastPageNum() - request.getPageNum()) - 1) * pageSize;
    }
    if (DataFileTypeEnum.DATA.name().equals(request.getDataType())) {
      DatasourceInfo datasourceInfo = datasourceService.getDataSourceByCode(
          request.getDataSourceCode());
      List<DataOperationInfo> infos = dataOperationMapper.getDataByPage(request, startIndex,
          pageSize);
      return new DataSearchResponseDTO(infos, datasourceInfo.getDataCount());
    } else {
      RefererSourceInfo refererSourceInfo = refererSourceService.getRefererSourceByCode(
          request.getDataSourceCode());
      List<DataOperationInfo> infos = dataOperationMapper.getRefererDataByPage(request, startIndex,
          pageSize);
      return new DataSearchResponseDTO(infos, refererSourceInfo.getDataCount());
    }
  }

  /**
   * Deletes all rows imported from the given file.
   *
   * <p>NOTE(review): this self-invocation bypasses the Spring proxy, so the
   * {@code @Transactional} on {@link #deleteFromTable} does NOT take effect here — confirm
   * whether per-shard atomicity is acceptable or the call should go through the proxy.
   */
  @Override
  public void deleteByFileCode(String fileCode, String datasourceCode, String dataType) {
    deleteFromTable(fileCode, datasourceCode, dataType);
  }

  /** Legacy PageHelper-based paging, kept for older callers. */
  @Override
  public List<DataOperationInfo> getDataByPageOld(DataOperationRequestDTO request) {
    PageHelper.startPage(request.getPageNum(), request.getPageSize());
    return dataOperationMapper.getDataByPageOld(request);
  }

  /**
   * Streams all rows of an imported file into a CSV, reporting progress via a redis hash.
   *
   * <p>Counts rows per shard first, initializes the progress hash and the export-history DB
   * record, writes the original import header followed by every row from the 10 shard tables,
   * then registers the finished file and flips the status to {@code END}.
   *
   * @throws IOException if the CSV file cannot be created or written
   * @throws ServiceException if no source file was selected
   */
  @Override
  public void exportData(DataOperationRequestDTO request, String fileCode, String userName)
      throws IOException {
    if (StringUtils.isEmpty(request.getFileCode())) {
      throw new ServiceException("必须选中哪一个文件");
    }
    // Per-shard row counts so overall progress can be reported.
    List<Integer> dataCount = getAllDataCount(request);
    // Progress hash is keyed per operator and per file.
    String redisKey = DATA_EXPORT_KEY + request.getOperationBy() + ":" + fileCode;
    String fileName = userName + "-" + DateUtil.nowDate(DateConstants.DATE_FORMAT_FULL_CN)
        + "-" + fileCode + "-export.csv";
    int allCount = dataCount.stream().mapToInt(Integer::intValue).sum();
    // Seed the progress hash: total count, status, file name, start time.
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_EXPORT_PROGRESS_ALL_COUNT, allCount);
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_EXPORT_STATUS, ImportStatusEnum.EXPORTING.name());
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_EXPORT_FILE_NAME, fileName);
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_EXPORT_FILE_TIME, DateUtil.nowDateTimeStr());
    redisCache.expire(redisKey, 1, TimeUnit.HOURS);
    // Columns are written in the same order the file was originally imported with.
    String headString = dataOperationMapper.getDataHeader(request.getFileCode());
    String[] headers = headString.split(",");
    String fileDir = environment.getProperty("holatask.exportFilePath") + request.getOperationBy() + "/";
    // Ensure the per-operator directory exists.
    File uploadDirectory = new File(fileDir);
    if (!uploadDirectory.exists()) {
      uploadDirectory.mkdirs();
    }
    String source = fileDir + fileName;
    // NOTE(review): FileWriter uses the platform default charset — confirm UTF-8 is intended.
    try (CSVPrinter csvPrinter = new CSVPrinter(new FileWriter(source), CSVFormat.DEFAULT)) {
      csvPrinter.printRecord((Object[]) headers);
      // Create the export-history DB record up front.
      dataOperationMapper.insertExportFileHistory(fileCode, request.getOperationBy(),
          request.getDataSourceCode(), fileName);
      // Drain each of the 10 shard tables into the file in turn.
      for (int i = 0; i < 10; i++) {
        String tableName = SystemFunctionUtil.getTableNameById(i + "00", request.getDataType());
        writeIntoFile(tableName, dataCount.get(i), csvPrinter, request, headers, redisKey);
      }
      csvPrinter.flush();
    }
    // Register the finished file and mark the export complete.
    File file = new File(source);
    SysFileInfo fileInfo = new SysFileInfo(fileCode,
        fileName, "url", source, file.length(), fileName, "csv", "text/csv",
        request.getDataSourceCode(), null);
    sysFileMapper.addFile(fileInfo, request.getOperationBy());
    dataOperationMapper.updateExportFileHistory(fileCode, file.length());
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_EXPORT_PROGRESS_IMPORT_COUNT, allCount);
    redisCache.redisTemplate.opsForHash().put(redisKey, DATA_EXPORT_STATUS, ImportStatusEnum.END.name());
    redisCache.expire(redisKey, 1, TimeUnit.HOURS);
  }

  /**
   * Pages one shard table and appends its rows to the CSV, advancing the progress counter.
   *
   * <p>Each stored row is a JSON object; values are emitted in {@code headerList} order with
   * missing keys written as empty strings. Failures on a single page are logged and skipped
   * so the rest of the export can proceed.
   */
  private void writeIntoFile(String tableName, Integer count, CSVPrinter csvPrinter,
      DataOperationRequestDTO request, String[] headerList, String redisKey) {
    if (count == 0) {
      return;
    }
    // Ceiling division: avoids issuing an extra empty query on exact multiples.
    int pageCount = (count + EXPORT_PAGE_SIZE - 1) / EXPORT_PAGE_SIZE;
    for (int j = 0; j < pageCount; j++) {
      try {
        int pageNum = j * EXPORT_PAGE_SIZE;
        List<String> dataInfos = dataOperationMapper.getDataForExport(pageNum, EXPORT_PAGE_SIZE,
            tableName, request);
        for (String dataInfo : dataInfos) {
          // Re-project the raw JSON row onto the original import columns.
          JSONObject dataObject = JSON.parseObject(dataInfo);
          List<Object> rowData = new ArrayList<>(headerList.length);
          for (String header : headerList) {
            Object d = dataObject.get(header);
            rowData.add(ObjectUtils.isEmpty(d) ? "" : d);
          }
          csvPrinter.printRecord(rowData);
        }
        // Advance the shared progress counter by this page's size.
        redisCache.redisTemplate.opsForHash()
            .increment(redisKey, DATA_EXPORT_PROGRESS_IMPORT_COUNT, dataInfos.size());
      } catch (Exception e) {
        log.error("export page failed, table={}, page={}", tableName, j, e);
      }
    }
  }

  /** Returns per-shard row counts for the request, from data or referer tables. */
  @Override
  public List<Integer> getAllDataCount(DataOperationRequestDTO request) {
    if (DataFileTypeEnum.DATA.name().equals(request.getDataType())) {
      return dataOperationMapper.getAllDataCount(request);
    }
    return dataOperationMapper.getAllRefererDataCount(request);
  }

  /** Pages the current user's export-file history (admins see all operators). */
  @Override
  public List<ExportFileInfo> getExportFiles(ExportFileRequestDTO request) {
    PageHelper.startPage(request.getPageNum(), request.getPageSize());
    request.setOperationBy(SecurityUtils.checkAdmin());
    return dataOperationMapper.getExportFiles(request);
  }

  /** Flags an exported file as downloaded (or not). */
  @Override
  public void updateDownloadStatus(String code, boolean status) {
    dataOperationMapper.updateDownloadStatus(code, status);
  }

  /** Returns the import history matching the request. */
  @Override
  public List<DataHistoryImportInfo> getImportHistory(DataOperationRequestDTO request) {
    return dataOperationMapper.getImportHistory(request);
  }

  /**
   * Drops the export-progress redis hashes for the given files.
   *
   * <p>NOTE(review): keys here use {@code SecurityUtils.getUserId()} while
   * {@link #exportData} keys by {@code request.getOperationBy()} — confirm these always match.
   */
  @Override
  public void clearSuccessRecord(List<String> fileCodes) {
    for (String fileCode : fileCodes) {
      String redisKey = DATA_EXPORT_KEY + SecurityUtils.getUserId() + ":" + fileCode;
      redisCache.deleteObject(redisKey);
    }
  }

  /**
   * Upserts a failure counter for a (mission, data) pair.
   *
   * <p>Tries the increment first; if no row exists, inserts one. A concurrent insert can race
   * on the unique key, so the insert's failure falls back to incrementing again.
   */
  @Override
  public void dataUseFailedIncrement(String missionCode, String dataCode, String tNum,
      String dateType) {
    int updateNum = dataOperationMapper.dataUseFailedIncrement(missionCode, dataCode);
    if (updateNum == 0) {
      try {
        dataOperationMapper.dataUseFailedInsert(missionCode, dataCode, tNum, dateType);
      } catch (Exception e) {
        // Lost the insert race (duplicate key) — the row now exists, so increment it.
        dataOperationMapper.dataUseFailedIncrement(missionCode, dataCode);
      }
    }
  }

  /**
   * Writes a dedup index row into the time-partitioned history table.
   *
   * <p>The extra column and value are chosen by the source's duplicate condition (mission,
   * affiliate, or advertiser code); with no recognized condition, nothing is written.
   */
  @Override
  public void insertHistoryDataGetIndex(MissionInfo mission, String dataCode,
      String indexTableCode, String dateFormat, SourceConfig sourceConfig) {
    String tableName = SystemFunctionUtil.getHistoryTableNameByTime(dateFormat,
        SystemFunctionUtil.HISTORY_DATA_GET_INDEX);
    String columnAdd;
    String columnAddValue;
    if (DuplicateConditionEnum.BY_MISSION_ID.name().equals(sourceConfig.getDuplicateCondition())) {
      columnAdd = "`MISSION_CODE`";
      columnAddValue = mission.getCode();
    } else if (DuplicateConditionEnum.BY_AFFILIATE_ID.name().equals(sourceConfig.getDuplicateCondition())) {
      columnAdd = "`AFFILIATE_CODE`";
      columnAddValue = mission.getAffiliateCode();
    } else if (DuplicateConditionEnum.BY_ADVERTISER_ID.name().equals(sourceConfig.getDuplicateCondition())) {
      columnAdd = "`ADVERTISER_CODE`";
      columnAddValue = mission.getAdvertiserCode();
    } else {
      log.error("没有去重条件");
      return;
    }
    dataOperationMapper.insertHistoryDataGetIndex(columnAdd, columnAddValue, dataCode,
        indexTableCode, tableName, sourceConfig.getChooseCode());
  }

  /** Removes a dedup index row from the time-partitioned history table. */
  @Override
  public void deleteHistoryDataGetIndex(String indexTableCode, String dateFormat) {
    String tableName = SystemFunctionUtil.getHistoryTableNameByTime(dateFormat,
        SystemFunctionUtil.HISTORY_DATA_GET_INDEX);
    dataOperationMapper.deleteHistoryDataGetIndex(indexTableCode, tableName);
  }

  /**
   * Records a data-fetch event, snapshotting mission, device and data as JSON bytes in the
   * time-partitioned detail history table.
   */
  @Override
  public void insertHistoryDataGetDetail(DataOperationInfo data, MissionInfo mission,
      String dataType, DeviceInfo deviceInfo, String dateFormat, String code, String dateFormatYmdHms, String sendCode, String missionSessionCode) {
    String tableName = SystemFunctionUtil.getHistoryTableNameByTime(dateFormat,
        SystemFunctionUtil.HISTORY_DATA_GET_DETAIL);
    dataOperationMapper.insertHistoryDataGetDetail(tableName,
        sendHistoryService.getHistoryIdByNameAndTime(dateFormat, dateFormatYmdHms, SystemFunctionUtil.HISTORY_DATA_GET_DETAIL), code,
        mission.getCode(), JSON.toJSONString(mission).getBytes(),
        deviceInfo.getCode(), JSON.toJSONString(deviceInfo).getBytes(),
        data.getCode(), JSON.toJSONString(data).getBytes(),
        data.getTNum(), dataType, data.getId(), sendCode, missionSessionCode);
  }

  /** Updates the status of a data-fetch detail record in its time-partitioned table. */
  @Override
  public void updateHistoryDataGetDetail(String code, boolean status, String dateFormat) {
    String tableName = SystemFunctionUtil.getHistoryTableNameByTime(dateFormat,
        SystemFunctionUtil.HISTORY_DATA_GET_DETAIL);
    dataOperationMapper.updateHistoryDataGetDetail(tableName, status, code);
  }

  /**
   * Records a mission-fetch event, snapshotting mission, plan and device as JSON bytes in the
   * time-partitioned mission history table.
   */
  @Override
  public void insertMissionGetHistory(String missionSendCode, MissionInfo missionInfo,
      DeviceInfo deviceInfo, String dateFormat, String dateFormatYmdHms) {
    String tableName = SystemFunctionUtil.getHistoryTableNameByTime(dateFormat, SystemFunctionUtil.HISTORY_MISSION_GET_DETAIL);
    dataOperationMapper.insertMissionGetHistory(tableName,
        sendHistoryService.getHistoryIdByNameAndTime(dateFormat, dateFormatYmdHms, SystemFunctionUtil.HISTORY_MISSION_GET_DETAIL),
        missionSendCode, missionInfo.getAdvertiserCode(), missionInfo.getAffiliateCode(), missionInfo.getGroupCode(), missionInfo.getCode(),
        JSON.toJSONString(missionInfo).getBytes(),
        missionInfo.getPlanCode(),
        ObjectUtils.isEmpty(missionInfo.getPlanInfo()) ? null : JSON.toJSONString(missionInfo.getPlanInfo()).getBytes(),
        deviceInfo.getCode(),
        JSON.toJSONString(deviceInfo).getBytes()
    );
  }

  /**
   * Queues a mission-completion result for asynchronous persistence.
   *
   * <p>Updating each record synchronously would serialize on the history table, so the result
   * is pushed to a redis list (success and failure lists are separate) and a scheduled job
   * drains it and applies the updates in batches.
   */
  @Override
  public void updateHistoryMissionGetDetail(String missionSendCode, String dateFormat,
      Boolean status, ClientMissionCompleteRequestDTO request, Date createDate) {
    String tableName = SystemFunctionUtil.getHistoryTableNameByTime(dateFormat,
        SystemFunctionUtil.HISTORY_MISSION_GET_DETAIL);
    // Producer pushes right; the batch consumer pops from the left.
    redisCache.redisTemplate.opsForList()
        .rightPush(status ? MISSION_SUCCESS_RETURN_KEY : MISSION_FAILED_RETURN_KEY, new MissionSuccessRedisMap(tableName, missionSendCode, request, dateFormat, createDate));
  }

  /** Returns the column headers recorded when the source file was imported. */
  @Override
  public List<DataImportHeadInfo> getImportFileHeader(String sourceCode) {
    return dataOperationMapper.getImportFileHeader(sourceCode);
  }

  /**
   * Deletes a file's rows from all 10 shard tables and decrements the datasource's row count.
   *
   * <p>NOTE(review): only effective when invoked through the Spring proxy — the internal call
   * from {@link #deleteByFileCode} bypasses {@code @Transactional}. The {@code REFERER}
   * branch is intentionally a no-op for now — TODO confirm whether referer deletion is needed.
   *
   * @throws ServiceException for an unrecognized {@code dataType}
   */
  @Transactional
  public void deleteFromTable(String fileCode, String datasourceCode, String dataType) {
    if (DataFileTypeEnum.DATA.name().equals(dataType)) {
      String tableName = "data_data_content_";
      for (int i = 1; i < 11; i++) {
        Integer deleteCount = dataOperationMapper.deleteDataByFileCode(fileCode, tableName + i);
        if (!StringUtils.isEmpty(datasourceCode)) {
          datasourceService.updateDataCountDecrement(datasourceCode, deleteCount);
        }
      }
    } else if (DataFileTypeEnum.REFERER.name().equals(dataType)) {
      // No referer cleanup implemented yet.
    } else {
      throw new ServiceException("不存在数据类型");
    }
  }
}
