package com.powerdata.system.paimon.impl;

import cn.hutool.core.thread.ThreadUtil;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.powerdata.common.utils.excel.ExcelSheetData;
import com.powerdata.common.utils.excel.ExcelXlsReader;
import com.powerdata.common.utils.excel.ExcelXlsxReader;
import com.powerdata.core.paimon.PDPaimonUtils;
import com.powerdata.core.paimon.bean.table.*;
import com.powerdata.core.paimon.catalog.PDHdfsUtils;
import com.powerdata.core.paimon.engine.PDPaimonSparkUtils;
import com.powerdata.core.paimon.enums.PaimonBaseType;
import com.powerdata.system.domain.param.*;
import com.powerdata.system.paimon.ITableService;

import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * Paimon table management service: table CRUD, schema/partition/option queries,
 * snapshot time travel (rollback/cherry-pick/as-of reads), and data import/export
 * between tables, Excel uploads and HDFS files.
 *
 * @author deeprado
 * @version 1.0
 * @date 2023/6/12 16:52
 */
@Service
public class TableServiceImpl implements ITableService {
    @Value(value = "${paimonManager.hadoopUser}")
    private String hadoopUser;
    @Value(value = "${paimonManager.hiveConf}")
    private String uploadHiveFilePath;
    @Value(value = "#{${paimonManager.extra:{}}}")
    private Map<String, Object> extraProperties;

    @Override
    public List<String> tableList(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();

        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath, extraProperties)
                .listTable(paimonCatalogParam.getDatabaseName());
    }

    @Override
    public TableMetricsBean getTableMetrics(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();

        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath, extraProperties)
                .getTableMetrics(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName());
    }

    @Override
    public String getSchemasOfTable(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();

        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath, extraProperties)
                .getCreateTableSql(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName(), paimonCatalogParam.getExecType());
    }

    @Override
    public List<TableColumnsBean> getColumnsOfTable(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();

        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath, extraProperties)
                .getColumnsOfTable(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName());
    }

    @Override
    public List<TablePartitionKeysBean> getPartitionMessage(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();

        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath, extraProperties)
                .getPartitionMessage(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName());
    }

    @Override
    public TableOptionBean getOptionsMessage(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();

        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath, extraProperties)
                .getOptionsMessage(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName());
    }

    @Override
    public List<TableTransactionsBean> getTransactionsMessage(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();


        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath, extraProperties)
                .getTransactionsMessage(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName());
    }

    @Override
    public Map<String, Object> getTableData(PaimonCatalogParam paimonCatalogParam) throws Exception {
        Integer pageNum = getNum(paimonCatalogParam.getPageNum(), 1);
        Integer pageSize = getNum(paimonCatalogParam.getPageSize(), 10);
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath, extraProperties)
                .getTableData(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName(), pageSize, pageNum);
    }

    @Override
    public Map<String, Object> getDataBySnapshotId(PaimonCatalogParam paimonCatalogParam) throws Exception {
        Integer pageNum = getNum(paimonCatalogParam.getPageNum(), 1);
        Integer pageSize = getNum(paimonCatalogParam.getPageSize(), 10);
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .getTableData(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName()
                        , paimonCatalogParam.getSnapshotId(), pageSize, pageNum);
    }

    @Override
    public Map<String, Object> getDataByTime(PaimonCatalogParam paimonCatalogParam) throws Exception {
        Integer pageNum = getNum(paimonCatalogParam.getPageNum(), 1);
        Integer pageSize = getNum(paimonCatalogParam.getPageSize(), 10);
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .getTableDataBeforeTime(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName()
                        , paimonCatalogParam.getAsOfTime(), pageSize, pageNum);
    }

    @Override
    public void setCurrentSnapshot(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .setCurrentSnapshot(paimonCatalogParam.getDatabaseName(),
                        paimonCatalogParam.getTableName(), paimonCatalogParam.getSnapshotId());
    }

    @Override
    public void rollbackSnapshot(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .rollbackSnapshot(paimonCatalogParam.getDatabaseName(),
                        paimonCatalogParam.getTableName(), paimonCatalogParam.getSnapshotId());
    }

    @Override
    public void cherryPickSnapshot(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .cherryPickSnapshot(paimonCatalogParam.getDatabaseName(),
                        paimonCatalogParam.getTableName(), paimonCatalogParam.getSnapshotId());
    }

    @Override
    public void addData(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .addTableData(paimonCatalogParam.getDatabaseName(),
                        paimonCatalogParam.getTableName(), paimonCatalogParam.getAddData());
    }

    @Override
    public Map<String, Object> getAppendSnapshotTableData(PaimonCatalogParam paimonCatalogParam) throws Exception {
        Integer pageNum = getNum(paimonCatalogParam.getPageNum(), 1);
        Integer pageSize = getNum(paimonCatalogParam.getPageSize(), 10);
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .getAppendsTableData(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName()
                        , paimonCatalogParam.getSnapshotId(), pageSize, pageNum);
    }

    @Override
    public Map<String, Object> getBetweenSnapshotTableData(PaimonCatalogParam paimonCatalogParam) throws Exception {
        Integer pageNum = getNum(paimonCatalogParam.getPageNum(), 1);
        Integer pageSize = getNum(paimonCatalogParam.getPageSize(), 10);
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .getAppendsTableData(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName()
                        , paimonCatalogParam.getSnapshotId(), paimonCatalogParam.getToSnapshotId(), pageSize, pageNum);
    }

    @Override
    public void deleteTable(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .deleteTable(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName());
    }

    @Override
    public void renameTable(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .renameTable(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName(),
                        paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getNewTableName());
    }

    @Override
    public void moveTable(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .renameTable(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName(),
                        paimonCatalogParam.getNewDatabaseName(), paimonCatalogParam.getNewTableName());
    }

    private int getNum(Integer pageNum, int i) {
        return ObjectUtils.allNotNull(pageNum) ? pageNum : i;
    }

    @Override
    public void createTable(PaimonTableParam paimonTableParam) throws Exception {
        PaimonOptionsDto optionsParams = paimonTableParam.getOptionsParams();
        Map<String, String> optionsMap = new HashMap<>();
        if (Optional.ofNullable(optionsParams.getBucket()).isPresent()) {
            optionsMap.put("bucket", optionsParams.getBucket());
        }
        if (Optional.ofNullable(optionsParams.getBucketKey()).isPresent()) {
            optionsMap.put("bucketKey", optionsParams.getBucketKey());
        }

        List<PaimonPartitionDto> partitionParam = paimonTableParam.getPartitionParams();
        ObjectMapper objectMapper = new ObjectMapper();
        List<Map<String, String>> partitionMapList = new ArrayList<>();
        for (PaimonPartitionDto partition : partitionParam) {
            String s = objectMapper.writeValueAsString(partition);
            partitionMapList.add(objectMapper.readValue(s, Map.class));
        }

        List<PaimonColumnDto> columnDtos = paimonTableParam.getColumnDtos();
        List<Map<String, String>> columMapList = new ArrayList<>();
        for (PaimonColumnDto column : columnDtos) {
            String s = objectMapper.writeValueAsString(column);
            columMapList.add(objectMapper.readValue(s, Map.class));
        }
        String warehouse = paimonTableParam.getHdfsurl();
        String types = paimonTableParam.getTypes();
        String hiveUrl = paimonTableParam.getHiveurl();
        PDPaimonUtils.build(paimonTableParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .createTable(paimonTableParam.getDatabaseName(), paimonTableParam.getTableName(), columMapList, partitionMapList, optionsMap);
    }

    @Override
    public void updateTable(PaimonTableParam paimonTableParam) throws Exception {
        ObjectMapper objectMapper = new ObjectMapper();
        List<PaimonPartitionDto> partitionParam = paimonTableParam.getPartitionParams();
        List<PaimonColumnDto> columnDtos = paimonTableParam.getColumnDtos();
        List<Map<String, String>> columMapList = new ArrayList<>();
        for (PaimonColumnDto column : columnDtos) {
            String s = objectMapper.writeValueAsString(column);
            columMapList.add(objectMapper.readValue(s, Map.class));
        }
        List<Map<String, String>> partitionMapList = new ArrayList<>();
        for (PaimonPartitionDto partition : partitionParam) {
            String s = objectMapper.writeValueAsString(partition);
            partitionMapList.add(objectMapper.readValue(s, Map.class));
        }

        String warehouse = paimonTableParam.getHdfsurl();
        String types = paimonTableParam.getTypes();
        String hiveUrl = paimonTableParam.getHiveurl();
        PDPaimonUtils.build(paimonTableParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .updateTable(paimonTableParam.getDatabaseName(), paimonTableParam.getTableName(), columMapList, partitionMapList);
    }

    @Override
    public PaimonTableParam queryTableInfo(PaimonTableParam paimonTableParam) throws Exception {

        PaimonTableParam result = new PaimonTableParam();

        String types = paimonTableParam.getTypes();
        String hiveUrl = paimonTableParam.getHiveurl();
        String warehouse = paimonTableParam.getHdfsurl();
        String databaseName = paimonTableParam.getDatabaseName();
        String tableName = paimonTableParam.getTableName();
        result.setHdfsurl(warehouse);
        result.setDatabaseName(databaseName);
        result.setTableName(tableName);
        PDPaimonUtils build = PDPaimonUtils.build(paimonTableParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath);
        List<TableColumnsBean> columnsOfTable = build.getColumnsOfTable(databaseName, tableName);

        List<PaimonColumnDto> columnObjects = new ArrayList<>();
        for (TableColumnsBean tableColumnsBean : columnsOfTable) {
            PaimonColumnDto columnDto = new PaimonColumnDto();
            columnDto.setColumnName(tableColumnsBean.getColumnName());
            columnDto.setComment(tableColumnsBean.getComment());
            columnDto.setIsNullable(tableColumnsBean.getIsNullable());
            columnDto.setIsPartitionKey(tableColumnsBean.getIsPartitionKey());
            columnDto.setIsPrimaryKey(tableColumnsBean.getIsPrimaryKey());
            String dataType = tableColumnsBean.getDataType();
            PaimonBaseType type = PaimonBaseType.getType(dataType);
            if (type != null) {
                columnDto.setDataType(type.name());
            } else if (dataType.contains("decimal")) {
                String pattern = "\\((.*?)\\)";
                Pattern compile = Pattern.compile(pattern);
                Matcher matcher = compile.matcher(dataType);
                String match = "";
                while (matcher.find()) {
                    match = matcher.group(1);
                }
                String[] split = match.split(",");
                columnDto.setPrecision(split[0].trim());
                columnDto.setScale(split[1].trim());
                columnDto.setDataType("Decimal");
            } else if (dataType.contains("timestamp")) {
                columnDto.setDataType("Timestamp");
                if (dataType.endsWith("tz")) {
                    columnDto.setIsWithZone("1");
                } else {
                    columnDto.setIsWithZone("0");
                }
            } else if (dataType.contains("list")) {
                columnDto.setDataType("List");
                String substring = dataType.substring(dataType.indexOf("<") + 1, dataType.indexOf(">"));
                PaimonBaseType subType = PaimonBaseType.getType(substring);
                columnDto.setValueType(subType.name());
            } else if (dataType.contains("map")) {
                columnDto.setDataType("Map");
                String substring = dataType.substring(dataType.indexOf("<") + 1, dataType.indexOf(">"));
                String[] split = substring.split(",");
                PaimonBaseType keyType = PaimonBaseType.getType(split[0]);
                PaimonBaseType valueType = PaimonBaseType.getType(split[1]);
                columnDto.setValueType(valueType.name());
                columnDto.setKeyType(keyType.name());
            }
            columnObjects.add(columnDto);
        }

        result.setColumnDtos(columnObjects);

        List<PaimonPartitionDto> partitionObjects = new ArrayList<>();
        List<TablePartitionKeysBean> partitionMessage = build.getPartitionMessage(databaseName, tableName);
        Map<String, String> collect = columnObjects.stream().collect(Collectors.toMap(PaimonColumnDto::getColumnName, PaimonColumnDto::getDataType));
        for (TablePartitionKeysBean keysBean : partitionMessage) {
            PaimonPartitionDto partitionObject = new PaimonPartitionDto();
            partitionObject.setTargetName(keysBean.getField());
            partitionObject.setType(keysBean.getTransform());
            partitionObject.setSourceName(keysBean.getSourceField());
            // 将分区字段类型给到
            partitionObject.setDateType(collect.get(partitionObject.getSourceName()));
            partitionObjects.add(partitionObject);
        }

        result.setPartitionParams(partitionObjects);
        return result;
    }

    @Override
    public void deleteData(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .deleteTableData(paimonCatalogParam.getDatabaseName(),
                        paimonCatalogParam.getTableName(), paimonCatalogParam.getAddData());
    }

    @Override
    public void importDataToTable(String id, String type, String hiveUrl, String warehouse,
                                  String databaseName, String tableName, MultipartFile file) throws Exception {
        PDPaimonUtils pdPaimonUtils = PDPaimonUtils.build(id, type, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath);
        List<TableColumnsBean> columnsOfTable = pdPaimonUtils.getColumnsOfTable(databaseName, tableName);
        ExecutorService executorService = ThreadUtil.newExecutor(10, 20, 20);
        CompletionService<Object> uploadFileCompletionService = ThreadUtil.newCompletionService(executorService);
        try {
            String filename = file.getOriginalFilename();
            // parse file
            List<ExcelSheetData> excelSheetDataList = parseDataExcel(Objects.requireNonNull(filename), file);
            for (ExcelSheetData excelSheetData : excelSheetDataList) {
                uploadFileCompletionService.submit(new Runnable() {
                    @Override
                    public void run() {
                        ArrayList<HashMap<String, String>> addDataList = new ArrayList<>();
                        List<List<String>> datums = new ArrayList<>();
//                        for (List<String> datum : excelSheetData.getData()) {
                        for (List<String> datum : datums) {
                            if (datum.size() != columnsOfTable.size()) {
                                try {
                                    throw new Exception("导入excel列数和表的列数不一致，无法导入");
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                            }
                            HashMap<String, String> midDataMap = new HashMap<>();
                            for (int i = 0; i < datum.size(); i++) {
                                midDataMap.put(columnsOfTable.get(i).getColumnName(), datum.get(i));
                            }
                            addDataList.add(midDataMap);
                        }
                        pdPaimonUtils.addTableData(databaseName, tableName, addDataList);
                    }
                }, "success");
            }
        } catch (Exception e) {
            throw new Exception(e.getMessage());
        }
        uploadFileCompletionService.take();
        executorService.shutdown();
    }

    @Override
    public Map<String, Object> getMetadataFiles(PaimonCatalogParam paimonCatalogParam) throws Exception {
        Integer pageNum = getNum(paimonCatalogParam.getPageNum(), 1);
        Integer pageSize = getNum(paimonCatalogParam.getPageSize(), 10);
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        String databaseName = paimonCatalogParam.getDatabaseName();
        String tableName = paimonCatalogParam.getTableName();
        String fileName = paimonCatalogParam.getFileName();
        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .getMetadataFiles(databaseName, tableName, fileName, pageSize, pageNum);
    }

    @Override
    public String getMetadata(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        String databaseName = paimonCatalogParam.getDatabaseName();
        String tableName = paimonCatalogParam.getTableName();
        String fileName = paimonCatalogParam.getFileName();
        return PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .getMetadata(databaseName, tableName, fileName);
    }

    @Override
    public void mergeSmallFile(PaimonCatalogParam paimonCatalogParam) throws Exception {
        String warehouse = paimonCatalogParam.getHdfsurl();
        String types = paimonCatalogParam.getTypes();
        String hiveUrl = paimonCatalogParam.getHiveurl();
        PDPaimonUtils.build(paimonCatalogParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .mergeSmallFile(paimonCatalogParam.getDatabaseName(), paimonCatalogParam.getTableName());

    }

    @Override
    public Map<String, Object> tableList2(PaimonCatalogParam paimonCatalogParam) throws Exception {
        HashMap<String, Object> resultMap = new HashMap<>();
        String catalogId = paimonCatalogParam.getId();
        String warehouse = paimonCatalogParam.getHdfsurl();
        String databaseName = paimonCatalogParam.getDatabaseName();
        if ("hive".equals(paimonCatalogParam.getTypes())) {
            databaseName = databaseName + ".db";
        }
        String tableName = paimonCatalogParam.getTableName();
        int pageSize = ObjectUtils.isEmpty(paimonCatalogParam.getPageSize()) ? 10 : paimonCatalogParam.getPageSize();
        int pageNum = ObjectUtils.isEmpty(paimonCatalogParam.getPageNum()) ? 1 : paimonCatalogParam.getPageNum();
        List<String> tables = PDHdfsUtils.build(catalogId, warehouse, hadoopUser).getDirs(databaseName, tableName);
        resultMap.put("total", tables.size());
        resultMap.put("data", tables.stream().skip((long) (pageNum - 1) * pageSize).limit(pageSize).collect(Collectors.toList()));
        return resultMap;
    }

    @Override
    public void tableDataToFile(TableToDataParam tableToDataParam) throws Exception {
        PaimonTableParam paimonTableParam = tableToDataParam.getPaimonTableParam();
        PaimonFileParam paimonFileParam = tableToDataParam.getPaimonFileParam();
        String splitStr = tableToDataParam.getSplitStr();
        String isHead = tableToDataParam.getIsHead();
        String filePath = paimonFileParam.getFilePath();

        FSDataOutputStream fileOutPutStream =
                PDHdfsUtils.build(paimonFileParam.getCatalogId(), paimonFileParam.getHdfsUrl(), hadoopUser)
                        .getFileOutPutStream(filePath);
        addHDFSDataFromTable(fileOutPutStream, paimonTableParam, splitStr, isHead);
        //整体转储（大表有内存溢出的风险）
//        ArrayList<String> dataList = getAllDataWithSplit(paimonTableParam,splitStr,isHead);
//        listWriteToHdfs(paimonFileParam,dataList);
    }

    private void addHDFSDataFromTable(FSDataOutputStream fileOutPutStream, PaimonTableParam paimonTableParam,
                                      String splitStr, String isHead) throws Exception {
        String warehouse = paimonTableParam.getHdfsurl();
        String types = paimonTableParam.getTypes();
        String hiveUrl = paimonTableParam.getHiveurl();
        PDPaimonUtils.build(paimonTableParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .tableDataToFileWithBranch(paimonTableParam.getDatabaseName(), paimonTableParam.getTableName(),
                        fileOutPutStream, splitStr, isHead);
    }


    @Override
    public void fileToTableData(TableToDataParam tableToDataParam) throws Exception {
        PaimonTableParam paimonTableParam = tableToDataParam.getPaimonTableParam();
        PaimonFileParam paimonFileParam = tableToDataParam.getPaimonFileParam();
        String splitStr = tableToDataParam.getSplitStr();
        String isHead = tableToDataParam.getIsHead();

        PDPaimonUtils pdPaimonUtils = PDPaimonUtils.build(paimonTableParam.getId(),
                paimonTableParam.getTypes(), paimonTableParam.getHiveurl(), paimonTableParam.getHdfsurl(), hadoopUser, uploadHiveFilePath);

        FSDataInputStream fileInputStream = getFileInputStream(paimonFileParam);

        pdPaimonUtils.addTableDataFromHDFSFile(paimonTableParam.getDatabaseName(), paimonTableParam.getTableName()
                , new InputStreamReader(fileInputStream), splitStr, isHead, null);

    }

    @Override
    public void clearTableData(PaimonTableParam paimonTableParam) throws Exception {
        String id = paimonTableParam.getId();
        String warehouse = paimonTableParam.getHdfsurl();
        String types = paimonTableParam.getTypes();
        String hiveUrl = paimonTableParam.getHiveurl();
        String databaseName = paimonTableParam.getDatabaseName();
        String tableName = paimonTableParam.getTableName();
        PDPaimonSparkUtils.build(id, types, hiveUrl, warehouse, null, hadoopUser)
                .executeSql("delete from " + id + "." + databaseName + "." + tableName);
    }

    private FSDataInputStream getFileInputStream(PaimonFileParam paimonFileParam) throws Exception {
        String catalogId = paimonFileParam.getCatalogId();
        String warehouse = paimonFileParam.getHdfsUrl();
        String filePath = paimonFileParam.getFilePath();
        return PDHdfsUtils.build(catalogId, warehouse, hadoopUser).getFileInputStream(filePath);
    }

    @Override
    public void updateData(UpdateDataParam updateDataParam) throws Exception {
        String warehouse = updateDataParam.getHdfsurl();
        String types = updateDataParam.getTypes();
        String hiveUrl = updateDataParam.getHiveurl();
        PDPaimonUtils.build(updateDataParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .updateData(updateDataParam.getDatabaseName(), updateDataParam.getTableName(),
                        updateDataParam.getOldData(), updateDataParam.getNewData());
    }

    private void listWriteToHdfs(PaimonFileParam paimonFileParam, ArrayList<String> dataList) throws Exception {
        String catalogId = paimonFileParam.getCatalogId();
        String warehouse = paimonFileParam.getHdfsUrl();
        String filePath = paimonFileParam.getFilePath();
        PDHdfsUtils.build(catalogId, warehouse, hadoopUser).writeToFile(filePath, dataList);
    }

    private ArrayList<String> getAllDataWithSplit(PaimonTableParam paimonTableParam, String splitStr, String isHead) throws Exception {
        String warehouse = paimonTableParam.getHdfsurl();
        String types = paimonTableParam.getTypes();
        String hiveUrl = paimonTableParam.getHiveurl();
        return PDPaimonUtils.build(paimonTableParam.getId(), types, hiveUrl, warehouse, hadoopUser, uploadHiveFilePath)
                .getAllTableData(paimonTableParam.getDatabaseName(), paimonTableParam.getTableName(), splitStr, isHead);
    }

    public static List<ExcelSheetData> parseDataExcel(String filename, MultipartFile file) throws Exception {
        InputStream inputStream = file.getInputStream();
        List<ExcelSheetData> excelSheetDataList = new ArrayList<>();
        String suffix = filename.substring(filename.lastIndexOf(".") + 1);
        if (StringUtils.equalsIgnoreCase(suffix, "xls")) {
            ExcelXlsReader excelXlsReader = new ExcelXlsReader();
            excelXlsReader.process(inputStream);
            excelSheetDataList = excelXlsReader.totalSheets;
        }
        if (StringUtils.equalsIgnoreCase(suffix, "xlsx")) {
            ExcelXlsxReader excelXlsxReader = new ExcelXlsxReader();
            excelXlsxReader.process(inputStream);
            excelSheetDataList = excelXlsxReader.totalSheets;
        }

        inputStream.close();

        return excelSheetDataList;
    }
}
