package com.hex.ds.hdtp.core.app.data.transfer.handler.impl;

import cn.hutool.core.util.StrUtil;
import com.hex.ds.hdtp.core.app.common.util.JaveShellUtil;
import com.hex.ds.hdtp.core.app.data.configuration.service.ITableConvertService;
import com.hex.ds.hdtp.core.app.data.contrast.service.IContrastTableStructureService;
import com.hex.ds.hdtp.core.app.data.transfer.converter.IDataTransferLogConverter;
import com.hex.ds.hdtp.core.app.data.transfer.converter.IDataTransferSqlStructureConverter;
import com.hex.ds.hdtp.core.app.data.transfer.dto.DataTransferSqlStructureDto;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferListService;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferLogService;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferTaskService;
import com.hex.ds.hdtp.core.app.metadata.service.ISourceInfoService;
import com.hex.ds.hdtp.core.inf.common.utils.JdbcUtil;
import com.hex.ds.hdtp.core.inf.data.contrast.service.impl.DataInfoSelectHandlerContext;
import com.hex.ds.hdtp.core.inf.metadata.po.SourceConfTemplatePo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

import java.util.Map;
import java.util.stream.Collectors;

/**
 * Flink-based transfer handler for the ORACLE -&gt; IMPALA route.
 *
 * <p>Generates the pieces of the Flink SQL job script: the JDBC source table
 * DDL, the target Hive catalog/dialect setup, and the INSERT OVERWRITE
 * statement that moves the data. All SQL text is assembled with Hutool's
 * {@code StrUtil.format} ({@code {}} placeholders) and the {@code LINEFEED}
 * separator inherited from {@link AbstractDataTransferHandlerFlinkService}.
 */
@Slf4j
@Service("ORACLE_IMPALA_FLINK")
public class OracleImpalaTransferHandlerFlinkService extends AbstractDataTransferHandlerFlinkService {

    /**
     * Index of the hive-conf-dir entry inside the target conf list.
     * NOTE(review): this relies on a fixed ordering of the target conf
     * template rows — confirm against the template definition; a keyed
     * lookup by conf name would be safer.
     */
    private static final int HIVE_CONF_DIR_INDEX = 6;

    /** All collaborators are supplied by Spring and forwarded to the parent. */
    public OracleImpalaTransferHandlerFlinkService(IDataTransferTaskService dataTransferTaskService, IDataTransferListService dataTransferListService, IDataTransferLogService dataTransferLogService, IDataTransferLogConverter dataTransferLogConverter, ITableConvertService tableConvertService, IDataTransferSqlStructureConverter dataTransferSqlStructureConverter, IContrastTableStructureService contrastTableStructureService, DataInfoSelectHandlerContext dataInfoSelectHandlerContext, JaveShellUtil javaShellUtil, ISourceInfoService sourceInfoService) {
        super(dataTransferTaskService, dataTransferListService, dataTransferLogService, dataTransferLogConverter, tableConvertService, dataTransferSqlStructureConverter, contrastTableStructureService, dataInfoSelectHandlerContext, javaShellUtil, sourceInfoService);
    }

    /**
     * Builds the Flink DDL for the JDBC source table: a DROP IF EXISTS
     * followed by CREATE TABLE ... WITH (&lt;jdbc connector conf&gt;).
     *
     * @param dataTransferSqlStructureDto source table name, column metadata and connection conf
     * @return the DROP and CREATE statements joined by a line feed
     * @author Yin.Yang
     * @since 2024/3/11
     */
    @Override
    protected String buildCreateSourceTable(DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        // The Flink-side source table gets a prefix so it cannot collide with
        // the physical table name referenced by the connector conf.
        String flinkSourceTable = FLINK_SOURCE_PREFIX + dataTransferSqlStructureDto.getSourceTable();
        String dropTemplate = "DROP TABLE IF EXISTS {};";
        String createTemplate = "CREATE TABLE IF NOT EXISTS {} ({}) WITH ({});";
        return StrUtil.format(dropTemplate, flinkSourceTable)
                + LINEFEED
                + StrUtil.format(createTemplate, flinkSourceTable,
                        dataTransferSqlStructureDto.getCreateTableMetadata(),
                        getSourceConnConf(dataTransferSqlStructureDto));
    }

    /**
     * Builds the target-side preamble: registers a Hive catalog named
     * TARGET_HIVE, switches to it and enables the Hive SQL dialect. No
     * CREATE TABLE is emitted here — the target table is expected to exist
     * in the Hive metastore already.
     *
     * @param dataTransferSqlStructureDto target database name and target conf list
     * @return the catalog/dialect setup statements
     */
    @Override
    protected String buildCreateTargetTable(DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        StringBuilder hiveTargetCatalog = new StringBuilder();
        hiveTargetCatalog.append(LINEFEED).append("CREATE CATALOG TARGET_HIVE WITH (").append(LINEFEED)
                .append("  'type' = 'hive',").append(LINEFEED)
                .append(StrUtil.format("  'default-database' = '{}',", dataTransferSqlStructureDto.getTargetDatabase())).append(LINEFEED)
                // Positional access into the conf list; see HIVE_CONF_DIR_INDEX note.
                .append(StrUtil.format("  'hive-conf-dir' = '{}'", dataTransferSqlStructureDto.getTargetConfList().get(HIVE_CONF_DIR_INDEX).getConfValue())).append(LINEFEED)
                .append(");").append(LINEFEED)
                .append("USE CATALOG TARGET_HIVE;").append(LINEFEED)
                .append("-- 切换到hive方言").append(LINEFEED)
                .append("SET 'table.sql-dialect' = 'hive';").append(LINEFEED).append(LINEFEED);
        return hiveTargetCatalog.toString();
    }

    /**
     * Builds the INSERT OVERWRITE statement that copies the selected columns
     * (plus the business date as a literal trailing column) from the prefixed
     * source table in the default catalog into the Hive target table.
     *
     * @param bizDate business date, appended as a string literal column and
     *                used by {@code buildInsertWhere} for filtering
     * @param dataTransferSqlStructureDto source/target table and column metadata
     * @return the INSERT OVERWRITE ... SELECT ... FROM ... statement
     * @author Yin.Yang
     * @since 2024/3/11
     */
    @Override
    public String buildDataInsertSql(String bizDate, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        String template = "INSERT OVERWRITE {} SELECT {} FROM default_catalog.default_database.{} {};";
        return StrUtil.format(template,
                dataTransferSqlStructureDto.getTargetDatabase() + "." + dataTransferSqlStructureDto.getTargetTable() + LINEFEED,
                dataTransferSqlStructureDto.getSelectTableMetadata(bizDate) + ",'" + bizDate + "'" + LINEFEED,
                FLINK_SOURCE_PREFIX + dataTransferSqlStructureDto.getSourceTable(),
                buildInsertWhere(bizDate, dataTransferSqlStructureDto));
    }

    /**
     * Renders the WITH(...) connector conf for the Flink JDBC source table
     * from the source conf list (url, credentials, physical table name and
     * scan options).
     *
     * @param dataTransferSqlStructureDto source conf list and physical source table name
     * @return the comma-separated connector options, one per line
     * @author Yin.Yang
     * @since 2024/3/11
     */
    protected String getSourceConnConf(DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        // Last-wins merge: without it Collectors.toMap throws
        // IllegalStateException when the conf list carries a duplicate confName,
        // aborting the whole transfer.
        Map<String, String> sourceConfMap = dataTransferSqlStructureDto.getSourceConfList()
                .stream().collect(Collectors.toMap(SourceConfTemplatePo::getConfName, SourceConfTemplatePo::getConfValue,
                        (first, second) -> second));
        // JdbcUtil.getJDBCUrl needs the source type; taken from the first conf row.
        sourceConfMap.put("sourceType", dataTransferSqlStructureDto.getSourceConfList().get(0).getConfType());
        StringBuilder jdbcConnConf = new StringBuilder();
        jdbcConnConf.append(LINEFEED);
        jdbcConnConf.append("  'connector' = 'jdbc',").append(LINEFEED);
        jdbcConnConf.append(StrUtil.format("  'url' = '{}', ", JdbcUtil.getJDBCUrl(sourceConfMap))).append(LINEFEED);
        jdbcConnConf.append(StrUtil.format("  'username' = '{}', ", sourceConfMap.get("username"))).append(LINEFEED);
        // NOTE(review): the password is embedded in plain text in the generated
        // script — make sure the script is never logged or persisted unprotected.
        jdbcConnConf.append(StrUtil.format("  'password' = '{}', ", sourceConfMap.get("password"))).append(LINEFEED);
        jdbcConnConf.append(StrUtil.format("  'table-name' = '{}', ", dataTransferSqlStructureDto.getSourceTableName())).append(LINEFEED);
        jdbcConnConf.append("  'connection.max-retry-timeout' = '60S',").append(LINEFEED);
        jdbcConnConf.append("  'scan.fetch-size' = '0',").append(LINEFEED);
        jdbcConnConf.append("  'scan.auto-commit' = 'true'").append(LINEFEED);
        return jdbcConnConf.toString();
    }

}
