package com.powerdata.system.paimon.impl;

import com.powerdata.common.utils.StringUtils;
import com.powerdata.common.utils.file.FileUtils;

import com.powerdata.core.paimon.catalog.PDFileUtils;
import com.powerdata.core.paimon.catalog.PDHdfsUtils;
import com.powerdata.core.paimon.engine.*;
import com.powerdata.core.paimon.PDPaimonUtils;
import com.powerdata.system.domain.PaimonCatalog;
import com.powerdata.system.domain.PaimonCatalogExample;
import com.powerdata.system.domain.param.PaimonCatalogParam;
import com.powerdata.system.mapper.PaimonCatalogMapper;
import com.powerdata.system.paimon.ICatalogService;
import jodd.io.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.ObjectUtils;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.util.*;
import java.util.stream.Collectors;

import static com.powerdata.common.utils.SecurityUtils.getUsername;

/**
 * @author deeprado
 * @version 1.0
 * @description
 * @date 2023/6/12 11:10
 */
@Service
@Service
public class CatalogServiceImpl implements ICatalogService {

    private static final Logger log = LoggerFactory.getLogger(CatalogServiceImpl.class);

    /**
     * Whitelist of columns that may be used in the ORDER BY clause. The order
     * column arrives from the HTTP request and is concatenated into raw SQL in
     * {@link #catalogList}, so anything outside this set must be rejected to
     * prevent SQL injection.
     */
    private static final Set<String> ALLOWED_ORDER_COLUMNS = new HashSet<>(Arrays.asList(
            "id", "descs", "types", "hiveurl", "hdfsurl",
            "creataby", "createtime", "modifyby", "modifytime"));

    /** hive-site / core-site / hdfs-site prefixes required for a hive catalog. */
    private static final List<String> HIVE_CONF_TYPES = Arrays.asList("core", "hdfs", "hive");

    /** Root directory on local disk under which per-catalog hive conf files are stored. */
    @Value(value = "${paimonManager.hiveConf}")
    private String uploadHiveFilePath;

    /** OS user used when talking to HDFS. */
    @Value(value = "${paimonManager.hadoopUser}")
    private String hadoopUser;

    @Resource
    private PaimonCatalogMapper paimonCatalogMapper;

    /**
     * Pages through the catalog table with optional fuzzy filters.
     *
     * @param paimonCatalogParam filters plus paging/sorting parameters (all optional)
     * @return map with keys {@code total} (matched row count) and {@code list} (current page)
     */
    @Override
    public Map<String, Object> catalogList(PaimonCatalogParam paimonCatalogParam) {
        HashMap<String, Object> result = new HashMap<>();
        int pageSize = ObjectUtils.isEmpty(paimonCatalogParam.getPageSize()) ? 10 : paimonCatalogParam.getPageSize();
        int pageNum = ObjectUtils.isEmpty(paimonCatalogParam.getPageNum()) ? 1 : paimonCatalogParam.getPageNum();
        // The order column and direction are concatenated into raw SQL below, so
        // both are validated against fixed whitelists (SQL-injection guard).
        String orderByColumn =
                StringUtils.isEmpty(paimonCatalogParam.getOrderByColumn()) ? "id" : paimonCatalogParam.getOrderByColumn();
        if (!ALLOWED_ORDER_COLUMNS.contains(orderByColumn)) {
            orderByColumn = "id";
        }
        String isAsc = "asc".equalsIgnoreCase(paimonCatalogParam.getIsAsc()) ? "asc" : "desc";
        PaimonCatalogExample paimonCatalogExample = new PaimonCatalogExample();
        PaimonCatalogExample.Criteria criteria = paimonCatalogExample.createCriteria();
        if (StringUtils.isNotEmpty(paimonCatalogParam.getId())) {
            criteria.andIdLike("%" + paimonCatalogParam.getId() + "%");
        }
        if (StringUtils.isNotEmpty(paimonCatalogParam.getDescs())) {
            criteria.andDescsLike("%" + paimonCatalogParam.getDescs() + "%");
        }
        if (StringUtils.isNotEmpty(paimonCatalogParam.getTypes())) {
            criteria.andTypesLike("%" + paimonCatalogParam.getTypes() + "%");
        }
        if (StringUtils.isNotEmpty(paimonCatalogParam.getHiveurl())) {
            criteria.andHiveurlLike("%" + paimonCatalogParam.getHiveurl() + "%");
        }
        if (StringUtils.isNotEmpty(paimonCatalogParam.getHdfsurl())) {
            criteria.andHdfsurlLike("%" + paimonCatalogParam.getHdfsurl() + "%");
        }
        // First query: total count of matching rows (no paging).
        List<PaimonCatalog> paimonCatalogs = paimonCatalogMapper.selectByExample(paimonCatalogExample);
        if (ObjectUtils.isEmpty(paimonCatalogs)) {
            result.put("total", 0);
            result.put("list", null);
            return result;
        }
        result.put("total", paimonCatalogs.size());

        // Second query: the requested page only. Column/direction are whitelisted
        // above; the remaining concatenated values are computed ints.
        paimonCatalogExample.setOrderByClause(
                " " + orderByColumn + " " + isAsc + " limit " + (pageNum - 1) * pageSize + "," + pageSize);
        result.put("list", paimonCatalogMapper.selectByExample(paimonCatalogExample));
        return result;
    }

    /**
     * Creates a new catalog: validates uniqueness and parameters, initializes
     * the runtime engine clients, then persists the record with audit fields.
     *
     * @param paimonCatalog catalog to create; its id must not already exist
     * @throws Exception if the id exists, parameters are invalid, or initialization fails
     */
    @Override
    public void addCatalog(PaimonCatalog paimonCatalog) throws Exception {
        String catalogId = paimonCatalog.getId();
        if (!ObjectUtils.isEmpty(paimonCatalogMapper.selectByPrimaryKey(catalogId))) {
            throw new Exception("catalog的id已存在");
        }
        checkPaimonParam(paimonCatalog, catalogId);
        try {
            checkAndInitCatalogForUtils(paimonCatalog);
        } catch (Exception e) {
            // Initialization failed: drop any half-built client state before rethrowing.
            removeCatalogForUtils(catalogId);
            // Keep the original message shape but preserve the cause chain.
            throw new Exception("请重新确认目录配置路径信息是否正确，新增目录初始化失败：" + e, e);
        }
        long time = System.currentTimeMillis();
        String username = getUsername();
        paimonCatalog.setCreataby(username);
        paimonCatalog.setCreatetime(time + "");
        paimonCatalog.setModifyby(username);
        paimonCatalog.setModifytime(time + "");
        paimonCatalogMapper.insertSelective(paimonCatalog);
    }

    /**
     * Updates an existing catalog: re-validates, re-initializes the engine
     * clients with the new configuration, then persists the change.
     *
     * <p>NOTE(review): the {@code finally} block below runs on BOTH success and
     * failure. Because the DB update happens only after this try/finally, the
     * row re-read in {@code finally} still holds the OLD configuration, so the
     * old clients are re-initialized even when the new init succeeded — the new
     * clients are then only rebuilt lazily after the map entries are evicted at
     * the end of this method. This looks intentional as a rollback-on-failure
     * mechanism but is suspicious on the success path; confirm before changing.
     *
     * @param paimonCatalog catalog with updated fields; must already exist
     * @throws Exception if the catalog is gone, parameters are invalid, or init fails
     */
    @Override
    @Transactional
    public void modifyCatalog(PaimonCatalog paimonCatalog) throws Exception {
        String catalogId = paimonCatalog.getId();
        if (ObjectUtils.isEmpty(paimonCatalogMapper.selectByPrimaryKey(catalogId))) {
            throw new Exception("该catalog已删除");
        }
        checkPaimonParam(paimonCatalog, catalogId);
        try {
            removeCatalogForUtils(catalogId);
            checkAndInitCatalogForUtils(paimonCatalog);
        } catch (Exception e) {
            removeCatalogForUtils(catalogId);
            throw new Exception("请重新确认目录配置路径信息是否正确，新增目录初始化失败：" + e, e);
        } finally {
            // Re-initialize from the persisted (still-old) row; see class-level note above.
            PaimonCatalog oldPaimonCatalog = paimonCatalogMapper.selectByPrimaryKey(catalogId);
            try {
                checkAndInitCatalogForUtils(oldPaimonCatalog);
            } catch (Exception e2) {
                // Best-effort restore of the previous clients; do not mask the primary outcome.
                log.warn("恢复旧catalog【{}】客户端失败", catalogId, e2);
            }
        }
        long time = System.currentTimeMillis();
        String username = getUsername();
        paimonCatalog.setModifyby(username);
        paimonCatalog.setModifytime(time + "");
        paimonCatalogMapper.updateByPrimaryKeySelective(paimonCatalog);
        // Evict cached clients so the next access rebuilds them with the new config.
        if ("file".equals(paimonCatalog.getTypes())) {
            PDHdfsUtils.fileSystemMap.remove(catalogId);
        } else {
            PDPaimonUtils.hadoopCatalogHashMap.remove(paimonCatalog.getId());
        }
    }

    /**
     * Normalizes and validates catalog parameters: strips a trailing slash from
     * the HDFS URL and, for hive catalogs, verifies all three conf files exist.
     *
     * @param paimonCatalog catalog being created or modified (mutated in place)
     * @param catalogId     id of the catalog, used to locate uploaded conf files
     * @throws Exception if a hive catalog is missing any of its three conf files
     */
    private void checkPaimonParam(PaimonCatalog paimonCatalog, String catalogId) throws Exception {
        // Guard against NPE: hdfsurl may be absent on some catalog types.
        String hdfsUrl = paimonCatalog.getHdfsurl();
        if (StringUtils.isNotEmpty(hdfsUrl) && hdfsUrl.endsWith("/")) {
            paimonCatalog.setHdfsurl(hdfsUrl.substring(0, hdfsUrl.length() - 1));
        }
        if ("hive".equals(paimonCatalog.getTypes())) {
            List<String> hiveCatalogFiles = getHiveCatalogFiles(catalogId);
            if (ObjectUtils.isEmpty(hiveCatalogFiles) || hiveCatalogFiles.size() < 3) {
                throw new Exception("请上传完整hiveCatalog配置文件（hive-site.xml、core-site.xml、hdfs-site.xml）");
            }
        }
    }

    /**
     * Deletes a catalog: removes the DB row, any uploaded hive conf files, and
     * all cached engine clients.
     *
     * @param paimonCatalog catalog to delete; must still exist
     * @throws Exception if the catalog has already been deleted
     */
    @Override
    @Transactional
    public void deleteCatalog(PaimonCatalog paimonCatalog) throws Exception {
        String catalogId = paimonCatalog.getId();
        if (ObjectUtils.isEmpty(paimonCatalogMapper.selectByPrimaryKey(catalogId))) {
            throw new Exception("该catalog已删除");
        }
        paimonCatalogMapper.deleteByPrimaryKey(catalogId);
        if ("hive".equals(paimonCatalog.getTypes())) {
            deleteAllHiveCatalogFiles(catalogId);
        }
        removeCatalogForUtils(catalogId);
    }

    /**
     * Stores an uploaded hive conf file under this catalog's directory and
     * returns the directory's file names afterwards.
     *
     * @param file      uploaded multipart file
     * @param catalogId target catalog id (becomes a subdirectory name)
     * @return names of all files now present in the catalog's conf directory
     * @throws Exception if the file name is unsafe or the transfer fails
     */
    @Override
    public List<String> upLoadHiveCatalogFile(MultipartFile file, String catalogId) throws Exception {
        String srcPath = uploadHiveFilePath + catalogId + "/";
        String fileName = file.getOriginalFilename();
        // Path-traversal guard: the client controls the original filename, so a
        // name containing separators or ".." could escape the upload directory.
        if (StringUtils.isEmpty(fileName)
                || fileName.contains("..") || fileName.contains("/") || fileName.contains("\\")) {
            throw new Exception("非法文件名：" + fileName);
        }
        String dstStr = srcPath + fileName;
        File dir = new File(srcPath);
        if (!(dir.exists() && dir.isDirectory()) && !dir.mkdirs()) {
            log.warn("创建目录失败：{}", srcPath);
        }
        File dstFile = new File(dstStr);
        if (dstFile.exists() && !dstFile.delete()) {
            log.warn("删除旧文件失败：{}", dstStr);
        }
        try {
            file.transferTo(dstFile);
            return Arrays.stream(Objects.requireNonNull(dir.listFiles())).map(File::getName).collect(Collectors.toList());
        } catch (FileNotFoundException e) {
            // Preserve the cause chain instead of only its message.
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Lists the file names in the catalog's hive conf directory, creating the
     * directory if it does not exist yet.
     *
     * @param catalogId catalog whose conf directory is listed
     * @return file names in the directory, empty if the directory is unreadable
     * @throws Exception declared by the interface; not thrown here
     */
    @Override
    public List<String> getHiveCatalogFiles(String catalogId) throws Exception {
        String srcPath = uploadHiveFilePath + catalogId + "/";
        File dir = new File(srcPath);
        if (!(dir.exists() && dir.isDirectory()) && !dir.mkdirs()) {
            log.warn("创建目录失败：{}", srcPath);
        }
        // listFiles() returns null on I/O error or when the path is not a
        // directory; return an empty list instead of throwing an NPE.
        File[] files = dir.listFiles();
        if (files == null) {
            return Collections.emptyList();
        }
        return Arrays.stream(files).map(File::getName).collect(Collectors.toList());
    }

    /**
     * Deletes one of the three hive conf files ({@code <types>-site.xml}) for a
     * catalog and returns the remaining file names.
     *
     * @param catalogId catalog whose conf file is deleted
     * @param types     one of {@code core}, {@code hdfs}, {@code hive}
     * @return remaining file names in the conf directory
     * @throws Exception if {@code types} is not a known conf type
     */
    @Override
    public List<String> deleteHiveCatalogFiles(String catalogId, String types) throws Exception {
        if (!HIVE_CONF_TYPES.contains(types)) {
            throw new Exception("文件类型不存在");
        }
        File dir = new File(uploadHiveFilePath + catalogId);
        String deleteFile = uploadHiveFilePath + catalogId + "/" + types + "-site.xml";
        File file = new File(deleteFile);
        FileUtil.delete(file);
        return Arrays.stream(Objects.requireNonNull(dir.listFiles())).map(File::getName).collect(Collectors.toList());
    }

    /**
     * Best-effort removal of all three hive conf files for a catalog.
     *
     * @param catalogId catalog whose conf files are removed
     * @throws Exception declared for interface symmetry; not thrown here
     */
    public void deleteAllHiveCatalogFiles(String catalogId) throws Exception {
        for (String types : HIVE_CONF_TYPES) {
            String deleteFile = uploadHiveFilePath + catalogId + "/" + types + "-site.xml";
            File file = new File(deleteFile);
            log.debug("删除hive配置文件：{}", deleteFile);
            if (!file.delete()) {
                // Best-effort: the file may legitimately not exist.
                log.debug("文件未删除（可能不存在）：{}", deleteFile);
            }
        }
    }

    /**
     * Streams one hive conf file ({@code <types>-site.xml}) back to the client.
     *
     * @param request   current HTTP request
     * @param response  response the file is written to
     * @param catalogId catalog whose conf file is downloaded
     * @param types     conf file type prefix (core/hdfs/hive)
     * @throws Exception if the file is missing or the download fails
     */
    @Override
    public void downloadHiveCatalogFile(HttpServletRequest request, HttpServletResponse response,
                                        String catalogId, String types) throws Exception {
        String downloadFile = uploadHiveFilePath + catalogId + "/" + types + "-site.xml";
        File file = new File(downloadFile);
        if (!file.exists() || !file.isFile()) {
            throw new Exception("文件已不存在，无法下载");
        }
        try {
            FileUtils.download(request, response, downloadFile, types + "-site.xml");
        } catch (Exception e) {
            log.error("下载文件失败：{}", downloadFile, e);
            throw new Exception(e.getMessage(), e);
        }
    }

    /**
     * Evicts every cached engine client (Flink, Spark, Paimon, HDFS) for the
     * given catalog id. Missing entries are a no-op for each map.
     */
    private void removeCatalogForUtils(String catalogId) {
        PDPaimonFlinkUtils.paimonFlinkClientsMap.remove(catalogId);
        PDPaimonSparkUtils.paimonSparkClientsMap.remove(catalogId);
        PDPaimonUtils.hadoopCatalogHashMap.remove(catalogId);
        PDHdfsUtils.fileSystemMap.remove(catalogId);
    }

    /**
     * Builds (and thereby validates) the runtime clients appropriate for the
     * catalog's type. For hive catalogs a {@code listDataBases()} call is used
     * as a connectivity smoke test.
     *
     * @param paimonCatalog catalog whose clients are initialized
     * @throws Exception if any client fails to build or connect
     */
    private void checkAndInitCatalogForUtils(PaimonCatalog paimonCatalog) throws Exception {
        String catalogId = paimonCatalog.getId();
        String types = paimonCatalog.getTypes();
        String hdfsUrl = paimonCatalog.getHdfsurl();
        String hiveUrl = paimonCatalog.getHiveurl();
        Map<String, Object> extraProperties = new HashMap<>();
        extraProperties.put("hiveUser", paimonCatalog.getHiveuser());
        extraProperties.put("hivePassword", paimonCatalog.getHivepassword());
        extraProperties.put("jdbcUri", paimonCatalog.getJdbcuri());
        extraProperties.put("jdbcUser", paimonCatalog.getJdbcuser());
        extraProperties.put("jdbcPassword", paimonCatalog.getJdbcpassword());
        if ("file".equals(types)) {
            PDFileUtils.build(catalogId, hdfsUrl, hadoopUser);
        } else if ("hadoop".equals(types)) {
            PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser);
        } else if ("jdbc".equals(types)) {
            // NOTE(review): identical to the "hadoop" branch — presumably the jdbc
            // catalog still needs an HDFS client for its warehouse path; confirm.
            PDHdfsUtils.build(catalogId, hdfsUrl, hadoopUser);
        } else if ("hive".equals(types)) {
            PDPaimonUtils build = PDPaimonUtils.build(catalogId, types, hiveUrl, hdfsUrl, hadoopUser, uploadHiveFilePath,
                    extraProperties);
            // Connectivity smoke test: fails fast if the hive metastore is unreachable.
            build.listDataBases();

            // TODO: engine-specific extra properties are not wired through yet (null).
            PDPaimonSparkUtils.build(catalogId, types, hiveUrl, hdfsUrl, null, hadoopUser);
            PDPaimonFlinkUtils.build(catalogId, types, hiveUrl, hdfsUrl, null, hadoopUser);
            PDPaimonDorisUtils.build(catalogId, types, hiveUrl, hdfsUrl, null, hadoopUser);
//            PDPaimonStarRocksUtils.build(catalogId, types, hiveUrl, hdfsUrl, null, hadoopUser);
//            PDPaimonTrinoUtils.build(catalogId, types, hiveUrl, hdfsUrl, null, hadoopUser);
        } else {
            // Unknown type: surface it in the logs instead of a silent stdout print.
            log.error("未知的catalog类型：{}（catalogId={}）", types, catalogId);
        }
    }

    /**
     * Initializes engine clients for every catalog in the database at startup.
     * A failure on one catalog is logged and does not stop the others.
     */
    @Override
    public void initCatalogUtils() {
        log.info("初始化目录开始");
        PaimonCatalogExample paimonCatalogExample = new PaimonCatalogExample();
        List<PaimonCatalog> paimonCatalogs = paimonCatalogMapper.selectByExample(paimonCatalogExample);
        for (PaimonCatalog paimonCatalog : paimonCatalogs) {
            try {
                log.info("开始初始化目录【{}】", paimonCatalog.getId());
                checkAndInitCatalogForUtils(paimonCatalog);
                log.info("初始化目录【{}】成功", paimonCatalog.getId());
            } catch (Exception e) {
                // Parameterized logging with the full stack trace instead of printStackTrace().
                log.error("初始化目录【{}】失败", paimonCatalog.getId(), e);
            }
        }
        log.info("初始化目录完成");
    }
}
