package com.powerdata.system.paimon.impl;

import com.powerdata.core.paimon.catalog.PDHdfsUtils;
import com.powerdata.system.domain.PaimonCatalog;
import com.powerdata.system.domain.PaimonCatalogExample;
import com.powerdata.system.domain.param.PaimonFileParam;
import com.powerdata.system.mapper.PaimonCatalogMapper;
import com.powerdata.system.paimon.IUnStructuredService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.ObjectUtils;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * @author deeprado
 * @version 1.0
 * @description
 * @date 2023/7/20 15:18
 */
@Service
public class UnStructuredServiceImpl implements IUnStructuredService {

    /** Hadoop user name used for every HDFS operation, injected from application configuration. */
    @Value(value = "${paimonManager.hadoopUser}")
    private String hadoopUser;

    @Resource
    private PaimonCatalogMapper paimonCatalogMapper;

    /**
     * Lists files under a directory, optionally filtered by file name, with paging.
     *
     * @param paimonFileParam carries catalogId, hdfsUrl, dirPath, fileName and the paging values
     * @return the listing map produced by {@code PDHdfsUtils.getFileList}
     * @throws Exception when the underlying HDFS access fails
     */
    @Override
    public Map<String, Object> listFile(PaimonFileParam paimonFileParam) throws Exception {
        String catalogId = paimonFileParam.getCatalogId();
        String hdfsUrl = paimonFileParam.getHdfsUrl();
        String dirPath = paimonFileParam.getDirPath();
        String fileName = paimonFileParam.getFileName();
        // Missing paging values default to 0 — presumably PDHdfsUtils treats 0 as "no paging"; TODO confirm.
        int pageNum = ObjectUtils.isEmpty(paimonFileParam.getPageNum()) ? 0 : paimonFileParam.getPageNum();
        int pageSize = ObjectUtils.isEmpty(paimonFileParam.getPageSize()) ? 0 : paimonFileParam.getPageSize();
        return PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).getFileList(dirPath, fileName, pageSize, pageNum);
    }

    /**
     * Batch upload to HDFS — NOT IMPLEMENTED. The delegation to
     * {@code PDHdfsUtils.batchUploadFileToHdfs} was disabled before this revision and the method
     * always returns {@code null}; callers must null-check the result.
     * TODO(review): restore the delegation (or remove this operation from
     * {@link IUnStructuredService}) instead of returning {@code null}.
     */
    @Override
    public Map<String, Object> batchUploadFileToHdfs(String catalogId, String hdfsUrl,
                                                     String dstPath, List<MultipartFile> files) throws Exception {
        return null;
    }

    /**
     * Streams a single HDFS file to the HTTP response as a download.
     *
     * @throws Exception when the underlying HDFS access or response streaming fails
     */
    @Override
    public void fastDownloadHdfsFile(HttpServletRequest request, HttpServletResponse response,
                                     String catalogId, String hdfsUrl,
                                     String filePath, String fileName) throws Exception {
        PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).downloadHdfsFile(request, response, filePath, fileName);
    }

    /**
     * Deletes the named files under {@code filePath} without any catalog-conflict checking.
     * Use {@link #checkAndDeleteFile(PaimonFileParam)} when catalog data directories must be protected.
     *
     * @throws Exception when the underlying HDFS delete fails
     */
    @Override
    public void batchDeleteFile(PaimonFileParam paimonFileParam) throws Exception {
        String catalogId = paimonFileParam.getCatalogId();
        String hdfsUrl = paimonFileParam.getHdfsUrl();
        String dirPath = paimonFileParam.getFilePath();
        List<String> fileNames = paimonFileParam.getFileNames();
        PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).deleteFiles(dirPath, fileNames);
    }

    /**
     * Streams an MP4 or picture file to the response for in-browser preview.
     *
     * @throws Exception when the underlying HDFS access or response streaming fails
     */
    @Override
    public void reviewMP4AndPic(HttpServletResponse response,
                                String catalogId, String hdfsUrl, String filePath, String fileName) throws Exception {
        PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).reviewMP4AndPic(response, filePath, fileName);
    }

    /**
     * Returns the text content of a file under {@code dirPath}.
     *
     * @throws Exception when the underlying HDFS read fails
     */
    @Override
    public String catFile(PaimonFileParam paimonFileParam) throws Exception {
        String catalogId = paimonFileParam.getCatalogId();
        String hdfsUrl = paimonFileParam.getHdfsUrl();
        String dirPath = paimonFileParam.getDirPath();
        String fileName = paimonFileParam.getFileName();
        return PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).catFile(dirPath, fileName);
    }

    /**
     * Creates the directory {@code dirName} under {@code dirPath}.
     *
     * @throws Exception when the underlying HDFS mkdir fails
     */
    @Override
    public void addDir(PaimonFileParam paimonFileParam) throws Exception {
        String catalogId = paimonFileParam.getCatalogId();
        String hdfsUrl = paimonFileParam.getHdfsUrl();
        String dirPath = paimonFileParam.getDirPath();
        String dirName = paimonFileParam.getDirName();
        PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).mkdirDir(dirPath, dirName);
    }

    /**
     * Deletes the requested files, but first rejects any whose full HDFS path overlaps a
     * non-"file" catalog's data directory (deleting those could corrupt the catalog).
     * Rejected entries are removed from {@code paimonFileParam.getFileNames()} before the
     * actual delete, and reported back with a per-file message.
     *
     * @param paimonFileParam carries hdfsUrl, filePath and the file names to delete
     * @return map with keys {@code successFiles} (deleted), {@code errorFiles} (rejected)
     *         and {@code errorMessage} (fileName → reason)
     * @throws Exception when the catalog lookup or the HDFS delete fails
     */
    @Override
    public Map<String, Object> checkAndDeleteFile(PaimonFileParam paimonFileParam) throws Exception {
        Map<String, Object> resultMap = new HashMap<>();
        List<String> successFiles = new ArrayList<>();
        List<String> errorFiles = new ArrayList<>();
        Map<String, String> errorMessage = new HashMap<>();

        String dirPath = paimonFileParam.getFilePath();
        String hdfsUrl = stripTrailingSlash(paimonFileParam.getHdfsUrl());
        // Deleting directly under the root must not append the "/" again.
        String deletePath = "/".equals(dirPath) ? hdfsUrl : hdfsUrl + dirPath;

        // All catalogs whose data directories must be protected ("file"-type catalogs are exempt).
        PaimonCatalogExample paimonCatalogExample = new PaimonCatalogExample();
        paimonCatalogExample.createCriteria().andTypesNotEqualTo("file");
        List<PaimonCatalog> paimonCatalogs = paimonCatalogMapper.selectByExample(paimonCatalogExample);

        // Iterator so rejected names can be removed from the param's list in place;
        // batchDeleteFile below then only sees the names that passed the check.
        List<String> fileNames = paimonFileParam.getFileNames();
        for (Iterator<String> it = fileNames.iterator(); it.hasNext(); ) {
            String fileName = it.next();
            String fullPath = deletePath + "/" + fileName;
            PaimonCatalog conflict = findConflictingCatalog(paimonCatalogs, fullPath);
            if (conflict != null) {
                errorMessage.put(fileName,
                        "该文件/目录归属catalog【" + conflict.getId() + "】的数据目录，删除可能导致该catalog数据异常，确认是否强制删除");
                errorFiles.add(fileName);
                it.remove();
            }
        }

        batchDeleteFile(paimonFileParam);
        successFiles.addAll(fileNames);
        resultMap.put("successFiles", successFiles);
        resultMap.put("errorFiles", errorFiles);
        resultMap.put("errorMessage", errorMessage);
        return resultMap;
    }

    /**
     * Downloads a whole HDFS directory (presumably as an archive — behavior is delegated to
     * {@code PDHdfsUtils.downloadDir}).
     *
     * @throws Exception when the underlying HDFS access or response streaming fails
     */
    @Override
    public void fastDownloadHdfsDir(HttpServletRequest request, HttpServletResponse response,
                                    String catalogId, String hdfsUrl, String filePath, String fileName) throws Exception {
        PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).downloadDir(request, response, filePath, fileName);
    }

    /**
     * Moves the named files from {@code filePath} into {@code dirPath}.
     *
     * @throws Exception when the underlying HDFS move fails
     */
    @Override
    public void moveFile(PaimonFileParam paimonFileParam) throws Exception {
        String catalogId = paimonFileParam.getCatalogId();
        String hdfsUrl = paimonFileParam.getHdfsUrl();
        String filePath = paimonFileParam.getFilePath();
        List<String> fileNames = paimonFileParam.getFileNames();
        String dirPath = paimonFileParam.getDirPath();
        PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).moveFile(filePath, fileNames, dirPath);
    }

    /**
     * Renames {@code fileName} under {@code dirPath} to {@code reName}.
     *
     * @throws Exception when the underlying HDFS rename fails
     */
    @Override
    public void renameFile(PaimonFileParam paimonFileParam) throws Exception {
        String catalogId = paimonFileParam.getCatalogId();
        String hdfsUrl = paimonFileParam.getHdfsUrl();
        String dirPath = paimonFileParam.getDirPath();
        String fileName = paimonFileParam.getFileName();
        String reName = paimonFileParam.getReName();
        PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser).renameFile(dirPath, fileName, reName);
    }

    /** Returns {@code url} without a trailing '/', or unchanged when it has none. */
    private static String stripTrailingSlash(String url) {
        return url.endsWith("/") ? url.substring(0, url.length() - 1) : url;
    }

    /**
     * Returns the first catalog whose data directory contains {@code fullPath}, or whose data
     * directory lies underneath {@code fullPath} (deleting an ancestor is just as destructive);
     * {@code null} when no catalog conflicts.
     */
    private PaimonCatalog findConflictingCatalog(List<PaimonCatalog> catalogs, String fullPath) {
        for (PaimonCatalog paimonCatalog : catalogs) {
            String catalogUrl = paimonCatalog.getHdfsurl();
            if (isPathPrefix(catalogUrl, fullPath) || isPathPrefix(fullPath, catalogUrl)) {
                return paimonCatalog;
            }
        }
        return null;
    }

    /**
     * True when {@code candidate} equals {@code base} or lies under it as a path. Unlike a raw
     * {@code startsWith}, the match must end on a '/' boundary, so "/data" does NOT count as a
     * prefix of "/database/x" (the raw check previously produced false conflicts).
     */
    private static boolean isPathPrefix(String base, String candidate) {
        if (!candidate.startsWith(base)) {
            return false;
        }
        return candidate.length() == base.length()
                || base.endsWith("/")
                || candidate.charAt(base.length()) == '/';
    }

}
