package com.athui.utils.explorer.metadata;

import com.athui.bean.metadata.ColumnMetaData;
import com.athui.bean.config.ConnectInfo;
import com.athui.bean.config.DataType;
import com.athui.bean.metadata.TableMetaData;
import com.athui.utils.common.ConfigureUtils;
import com.athui.utils.common.jdbc.JdbcResultSetBuilder;
import com.athui.utils.common.jdbc.JdbcUtils;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Base class for JDBC-backed metadata explorers.
 *
 * <p>Subclasses supply the dialect-specific pieces (native-to-Flink type
 * mapping, the column-listing SQL, and per-row parsing) while this class
 * drives the common flow: run the column query, map each row to a
 * {@link ColumnMetaData}, assign a stable column code and an ordinal index,
 * resolve the Flink type, and attach the columns to the
 * {@link TableMetaData} shell produced by the subclass.
 *
 * @author zhangzhonghui
 * @since 2024-08-16
 * @version 1.0
 */
public abstract class MetaStoreExplorer extends JdbcUtils implements MetaDataExplorer {

    /** Maps the store's native column-type names to Flink {@link DataType}s. */
    public abstract Map<String, DataType> typeMapping();

    /** Creates the table-level metadata shell (no columns yet) for the given table. */
    public abstract TableMetaData tableMetaDataBuilder(String schemaName, String tableName);

    /** Returns the SQL statement that lists the columns of the given table. */
    public abstract String queryColumnMetaDataSql(String schemaName, String tableName);

    /**
     * Converts the current row of {@code resultSet} into a {@link ColumnMetaData}.
     *
     * @throws SQLException if reading the current row fails
     */
    public abstract ColumnMetaData columnMetaDataBuilder(ResultSet resultSet) throws SQLException;

    /**
     * Builds the full metadata for one table: the table shell plus its column list.
     *
     * @param schemaName schema (database) the table lives in
     * @param tableName  table to describe
     * @return table metadata with its column metadata list populated
     */
    public TableMetaData build(String schemaName, String tableName) {

        TableMetaData tableMetaData = tableMetaDataBuilder(schemaName, tableName);

        // NOTE(review): removed a leftover debug/dead statement that built a
        // {schema_name, table_name} map and discarded the result of
        // ConfigureUtils.mapperParser("hiveTableMetaStore", ...).toString() —
        // the column query below is driven by queryColumnMetaDataSql instead.

        List<ColumnMetaData> result = operation(new JdbcResultSetBuilder<List<ColumnMetaData>>() {

            @Override
            public String buildSql() {
                return queryColumnMetaDataSql(schemaName, tableName);
            }

            @Override
            public List<ColumnMetaData> accept(ResultSet resultSet) throws SQLException {
                Map<String, DataType> flinkTypeMapping = typeMapping();

                List<ColumnMetaData> columnMetaDataList = new ArrayList<>();

                int index = 0;
                while (resultSet.next()) {

                    index += 1;
                    ColumnMetaData columnMetaData = columnMetaDataBuilder(resultSet);

                    // Stable column code: md5 of "<prefix>_<schema>_<table>_<column>".
                    // NOTE(review): the leading component is currently always "" — it
                    // looks like a placeholder (datasource id?). Confirm before changing:
                    // the hash is the column's persisted identity.
                    String columnCode = String.format("%s_%s_%s_%s",
                            "",
                            schemaName,
                            tableName,
                            columnMetaData.getColumnName()
                    );
                    columnMetaData.setColumnCode(DigestUtils.md5Hex(columnCode));

                    // Ordinal position; column order may change after downstream
                    // processing, so this is kept for later sorting.
                    columnMetaData.setIndex(String.valueOf(index));

                    // Resolve the Flink data type; fall back to INT so an unmapped
                    // native type stands out downstream instead of failing here.
                    DataType dataType = flinkTypeMapping.get(columnMetaData.getOriginalColumnType());
                    if (dataType == null) {
                        dataType = new DataType(BasicTypeInfo.INT_TYPE_INFO, DataType.NUMERICAL);
                    }

                    columnMetaData.setFlinkColumnType(dataType.getFlinkType());
                    columnMetaData.setTypeLabel(dataType.getLabel());

                    columnMetaDataList.add(columnMetaData);
                }
                return columnMetaDataList;
            }
        });

        tableMetaData.setColumnMetaDataList(result);

        return tableMetaData;
    }

    /**
     * Builds metadata for a batch of tables.
     *
     * @param tableMap map of table name → schema name
     * @return one {@link TableMetaData} per entry
     */
    @Override
    public List<TableMetaData> build(Map<String, String> tableMap) {
        // Iterate entries directly instead of keySet() followed by a get() per key.
        return tableMap.entrySet().stream()
                .map(entry -> build(entry.getValue(), entry.getKey()))
                .collect(Collectors.toList());
    }

    /**
     * Registers the connection info on the underlying JDBC helper.
     *
     * @param connectInfo connection settings to use for subsequent queries
     * @return this explorer, for fluent chaining
     */
    public MetaDataExplorer setConnectInfo(ConnectInfo connectInfo) {
        super.putConnectInfo(connectInfo);
        return this;
    }

}
