package org.zjvis.datascience.web.controller;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import java.io.File;
import java.util.List;
import java.util.Objects;
import javax.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.dao.QueryTimeoutException;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import org.zjvis.datascience.common.dto.DatasetImportResDTO;
import org.zjvis.datascience.common.dto.FileUploadDTO;
import org.zjvis.datascience.common.dto.FileUploadRequestDTO;
import org.zjvis.datascience.common.exception.DataScienceException;
import org.zjvis.datascience.common.model.ApiResult;
import org.zjvis.datascience.common.model.ApiResultCode;
import org.zjvis.datascience.common.pool.ExcelPool;
import org.zjvis.datascience.common.vo.dataset.DatasetAndHeadVO;
import org.zjvis.datascience.common.vo.dataset.DatasetColInfoVO;
import org.zjvis.datascience.common.vo.dataset.DatasetFileWriteFileVO;
import org.zjvis.datascience.common.vo.dataset.DatasetFileWriteVO;
import org.zjvis.datascience.common.vo.dataset.DatasetFilesWriteVO;
import org.zjvis.datascience.service.SemanticService;
import org.zjvis.datascience.service.SftpConnectService;
import org.zjvis.datascience.service.csv.CsvSemiotic;
import org.zjvis.datascience.service.csv.dto.ErrorInfo;

/**
 * @description 数据集管理接口 Controller
 * @date 2021-12-17
 */
@Api(tags = "数据集管理")
@RestController
@RequestMapping("/dataset")
public class SftpController {

    private static final Logger logger = LoggerFactory.getLogger(SftpController.class);

    @Autowired
    private SftpConnectService sftpConnectService;

    // NOTE(review): not referenced anywhere in this controller — confirm whether it can be removed.
    @Autowired
    private SemanticService semanticService;

    /** Scratch directory for whole-file uploads awaiting preview. */
    @Value("${sftp.tempfilepath}")
    private String tempfilepath;

    /** Root directory under which chunked-upload slices are assembled. */
    @Value("${upload.folderpath}")
    private String sliceFolderPath;

    /**
     * Receives an uploaded file (whole or chunked) and returns preview data.
     * <p>
     * Non-Excel files with more than one chunk go through the slice-upload path;
     * everything else (csv / xls / xlsx, single chunk) is previewed directly.
     *
     * @param fileUploadRequestDTO upload metadata plus the file content
     * @return preview information, or an error result on unsupported/empty files
     */
    @PostMapping("/uploadFiles")
    @ApiOperation(value = "文件上传浏览数据", notes = "文件上传浏览数据")
    public ApiResult<FileUploadDTO> uploadFiles(@Valid FileUploadRequestDTO fileUploadRequestDTO) {
        ensureDirectory(new File(tempfilepath));
        fileUploadRequestDTO.setSeparate(normalizeSeparator(fileUploadRequestDTO.getSeparate()));
        // BUGFIX: requireNonNull must guard getFilename() itself; the original called
        // toLowerCase() first, so a null filename threw NPE before the check ran.
        String name = Objects.requireNonNull(fileUploadRequestDTO.getFilename(), "filename").toLowerCase();
        boolean isExcel = name.endsWith(".xls") || name.endsWith(".xlsx");
        if (!isExcel) {
            CsvSemiotic csvSemiotic = new CsvSemiotic(fileUploadRequestDTO.getSeparate(),
                    fileUploadRequestDTO.getQuote(),
                    fileUploadRequestDTO.getEscape());
            // Each (charset, separator, escape, quote) combination gets its own slice directory,
            // which must exist before slices are written.
            String formatName = generateFileDifferentFormatName(fileUploadRequestDTO.getCharSet(), csvSemiotic);
            ensureDirectory(new File(sliceFolderPath + File.separatorChar
                    + fileUploadRequestDTO.getIdentifier() + File.separatorChar + formatName));
        }
        if (!isExcel && fileUploadRequestDTO.getTotalChunks() > 1) {
            try {
                // 如果失败那么中断, 后续能成功的会继续传， 最终前端需要判断是否失败了，主要是看返回时候含该结果
                FileUploadDTO result = sftpConnectService.sliceUpload(fileUploadRequestDTO);
                return ApiResult.valueOf(ApiResultCode.SUCCESS, result, result.getTips());
            } catch (DataScienceException e) {
                logger.error("sliceUpload Exception: ", e);
                return e.getApiResult();
            } catch (QueryTimeoutException e) {
                logger.error("checkFileMd5 redis 超时获取，请重新执行操作", e);
                // Drop stale cache state so the client can retry from scratch.
                sftpConnectService.deleteFileCatchInfo(fileUploadRequestDTO.getIdentifier());
                return ApiResult.valueOf(ApiResultCode.SLICE_UPLOAD_REDIS_TIMEOUT);
            }
        }
        if (!name.endsWith(".csv") && !isExcel) {
            logger.warn("API /dataset/uploadFiles failed, since {}", ApiResultCode.FILE_SUPPORT_ERROR.getMessage());
            return ApiResult.valueOf(ApiResultCode.FILE_SUPPORT_ERROR);
        }
        if (fileUploadRequestDTO.getFile().getSize() == 0) {
            logger.warn("API /dataset/uploadFiles failed, since uploading file's size is 0");
            return ApiResult.valueOf(ApiResultCode.CONTENT_ERROR);
        }
        try {
            FileUploadDTO result = sftpConnectService.previewDataset(fileUploadRequestDTO);
            return ApiResult.valueOf(ApiResultCode.SUCCESS, result, result.getTips());
        } catch (DataScienceException e) {
            logger.error("previewDataset error DataScienceException: ", e);
            return e.getApiResult();
        }
    }

    /**
     * Writes previously uploaded file(s) into the database.
     * <p>
     * NOTE(review): method name should be lowerCamelCase ({@code fileWrite});
     * kept as-is for source compatibility with any direct callers.
     *
     * @param vo files to import plus parsing options
     * @return import result; negative dataset ids from the service are mapped to error results
     */
    @PostMapping("/fileWrite")
    @ApiOperation(value = "文件写入数据库", notes = "文件写入数据库")
    public ApiResult<DatasetImportResDTO> FileWrite(@Valid @RequestBody DatasetFilesWriteVO vo) {
        vo.setSeparate(normalizeSeparator(vo.getSeparate()));
        if ("csv".equals(vo.getImportType()) && !vo.getFirstImport()) {
            // Non-first CSV imports read the "_check" copy produced by a prior validation pass.
            List<DatasetFileWriteFileVO> files = vo.getFiles();
            for (DatasetFileWriteFileVO file : files) {
                file.setFilename(file.getFilename() + "_check");
            }
            vo.setFiles(files);
        }
        DatasetImportResDTO result = sftpConnectService.writeToGP(vo, vo.getCharSet());
        // writeToGP signals failure through sentinel dataset ids: -1 = generic upload
        // error, -2 = Redis timeout (file too large).
        if (result.getDatasetId() == -1) {
            logger.warn("API /dataset/fileWrite failed, since {}", ApiResultCode.UPLOAD_ERROR.getMessage());
            return ApiResult.valueOf(ApiResultCode.UPLOAD_ERROR, null, "请检查源文件是否可正常打开");
        }
        if (result.getDatasetId() == -2) {
            logger.warn("API /dataset/fileWrite failed, since Redis RedisCommandTimeoutException");
            return ApiResult.valueOf(ApiResultCode.UPLOAD_ERROR, null, "文件过大，请转为CSV后重新上传");
        }
        return ApiResult.valueOf(result);
    }

    /**
     * Parses the validation-error file produced for a dataset upload.
     *
     * @param vo identifies the file whose error report should be parsed
     * @return parsed error details, or a timeout error result on Redis timeout
     */
    @PostMapping("/parseErrorFile")
    @ApiOperation(value = "解析错误文件", notes = "解析错误的文件")
    public ApiResult<ErrorInfo> parseErrorFile(@Valid @RequestBody DatasetFileWriteVO vo) {
        vo.setSeparate(normalizeSeparator(vo.getSeparate()));
        try {
            return ApiResult.valueOf(sftpConnectService.parseErrorCSVFile(vo));
        } catch (QueryTimeoutException e) {
            logger.error("checkFileMd5 redis 超时获取，请重新执行操作", e);
            sftpConnectService.deleteFileCatchInfo(vo.getIdentifier());
            return ApiResult.valueOf(ApiResultCode.SLICE_UPLOAD_REDIS_TIMEOUT);
        }
    }

    /**
     * Applies data masking to the preview of the given columns.
     * <p>
     * NOTE(review): method name should be lowerCamelCase ({@code generateMask});
     * kept as-is for source compatibility with any direct callers.
     *
     * @param vo column information for the dataset being previewed
     * @return the masked preview data
     */
    @PostMapping("/generateMask")
    @ApiOperation(value = "预览数据脱敏", notes = "让预览的数据脱敏")
    public ApiResult<DatasetAndHeadVO> GenerateMask(@Valid @RequestBody DatasetColInfoVO vo) {
        DatasetAndHeadVO result = sftpConnectService.generateMask(vo);
        return ApiResult.valueOf(result);
    }

    /**
     * Checks the client-computed MD5 against upload history; when the file is
     * already complete the client may read preview information directly.
     *
     * @param fileUploadRequestDTO upload metadata including the MD5 identifier
     * @return upload status, or an error result on unsupported type / timeout
     */
    @PostMapping("/checkFileMd5")
    @ApiOperation(value = "检查文件MD5，查询文件历史上传记录", notes = "查看历史是否上传与其状态")
    public ApiResult<FileUploadDTO> checkFileMd5(FileUploadRequestDTO fileUploadRequestDTO) {
        // BUGFIX: lowercase before the extension check, consistent with uploadFiles —
        // previously ".CSV"/".XLSX" uploads were rejected here but accepted there.
        String name = fileUploadRequestDTO.getFilename().toLowerCase();
        if (!name.endsWith(".csv") && !name.endsWith(".xls") && !name.endsWith(".xlsx")) {
            logger.warn("API /dataset/uploadFiles failed, since {}", ApiResultCode.FILE_SUPPORT_ERROR.getMessage());
            return ApiResult.valueOf(ApiResultCode.FILE_SUPPORT_ERROR);
        }
        fileUploadRequestDTO.setSeparate(normalizeSeparator(fileUploadRequestDTO.getSeparate()));
        CsvSemiotic csvSemiotic = new CsvSemiotic(fileUploadRequestDTO.getSeparate(),
                fileUploadRequestDTO.getQuote(),
                fileUploadRequestDTO.getEscape());
        String formatName = generateFileDifferentFormatName(fileUploadRequestDTO.getCharSet(), csvSemiotic);
        ensureDirectory(new File(sliceFolderPath + File.separatorChar
                + fileUploadRequestDTO.getIdentifier() + File.separatorChar + formatName));

        try {
            FileUploadDTO result = sftpConnectService.checkFileMd5(fileUploadRequestDTO);
            return ApiResult.valueOf(result);
        } catch (QueryTimeoutException e) {
            logger.error("checkFileMd5 redis 超时获取，请重新执行操作", e);
            sftpConnectService.deleteFileCatchInfo(fileUploadRequestDTO.getIdentifier());
            return ApiResult.valueOf(ApiResultCode.SLICE_UPLOAD_REDIS_TIMEOUT);
        } catch (DataScienceException e) {
            logger.error("checkFileMd5 Exception: ", e);
            return e.getApiResult();
        }
    }

    /**
     * Reports progress of a checkFileMd5 run for the given upload.
     *
     * @param fileUploadRequestDTO identifies the upload being checked
     * @return current progress information
     */
    @PostMapping("/checkProgress")
    @ApiOperation(value = "检查进度条", notes = "查看checkFile的进度")
    public ApiResult<FileUploadDTO> checkProgress(FileUploadRequestDTO fileUploadRequestDTO) {
        fileUploadRequestDTO.setSeparate(normalizeSeparator(fileUploadRequestDTO.getSeparate()));
        FileUploadDTO result = sftpConnectService.checkProgress(fileUploadRequestDTO);
        return ApiResult.valueOf(result);
    }

    /**
     * Maps the symbolic separator names "tab" / "space" sent by the frontend to
     * their literal characters; any other value is passed through unchanged.
     */
    private static String normalizeSeparator(String separate) {
        if ("tab".equals(separate)) {
            return "\t";
        }
        if ("space".equals(separate)) {
            return " ";
        }
        return separate;
    }

    /**
     * Creates {@code dir} (including parents) if absent; a failed creation is
     * logged instead of being silently ignored.
     */
    private static void ensureDirectory(File dir) {
        if (!dir.exists() && !dir.mkdirs()) {
            logger.warn("Failed to create directory {}", dir.getAbsolutePath());
        }
    }

    /**
     * Builds a directory name unique per charset + separator + escape + quote
     * combination, used to segregate slices of differently-formatted uploads.
     */
    private String generateFileDifferentFormatName(String charSet, CsvSemiotic csvSemiotic) {
        return charSet + csvSemiotic.getSeparate().getValue()
                + csvSemiotic.getEscape().getValue()
                + csvSemiotic.getQuote().getValue();
    }

}
