package cn.hexcloud.dga.meta.service.impl;

import cn.hexcloud.dga.common.util.SqlUtil;
import cn.hexcloud.dga.meta.bean.TableMetaInfo;
import cn.hexcloud.dga.meta.bean.TableMetaInfoExtra;
import cn.hexcloud.dga.meta.bean.TableMetaInfoForQuery;
import cn.hexcloud.dga.meta.bean.TableMetaInfoVO;
import cn.hexcloud.dga.meta.mapper.TableMetaInfoMapper;
import cn.hexcloud.dga.meta.service.TableMetaInfoExtraService;
import cn.hexcloud.dga.meta.service.TableMetaInfoService;
import com.alibaba.druid.sql.visitor.functions.IfNull;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.support.spring.PropertyPreFilters;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * <p>
 * 元数据表 服务实现类
 * </p>
 *
 * @author jiangdan7
 * @since 2023-08-23
 */
@DS("dga")
@Service
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /** Thrift URI of the Hive metastore (e.g. thrift://host:9083), injected from config. */
    @Value("${hive.metaserver.url}")
    String hiveMetaServerUrl;

    private HiveMetaStoreClient hiveMetaStoreClient;

    @Autowired
    private TableMetaInfoExtraService tableMetaInfoExtraService;

    /**
     * Creates the Hive metastore client after property injection.
     * Fails fast on startup: without a metastore client this service is unusable.
     *
     * @throws RuntimeException if the client cannot be created
     */
    @PostConstruct
    public void initHiveClient() {
        HiveConf hiveConf = new HiveConf();
        MetastoreConf.setVar(hiveConf, MetastoreConf.ConfVars.THRIFT_URIS, hiveMetaServerUrl);
        try {
            hiveMetaStoreClient = new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Takes a full metadata snapshot of every table in {@code schemaName} for the
     * given assessment date: Hive-side metadata plus HDFS-side sizes/timestamps.
     * Any existing snapshot for the same date is replaced, so the method is
     * safe to re-run.
     *
     * @param assessDate snapshot date key (stored as assess_date)
     * @param schemaName Hive database/schema to scan
     * @throws RuntimeException if the Hive table listing fails
     */
    @Override
    public void initTableMetaInfo(String assessDate, String schemaName) {
        try {
            List<String> tableNameList = hiveMetaStoreClient.getAllTables(schemaName);

            List<TableMetaInfo> tableMetaInfoList = new ArrayList<>(tableNameList.size());
            for (String tableName : tableNameList) {
                // Hive-side metadata; null means this table could not be read and is skipped.
                TableMetaInfo tableMetaInfo = addHiveInfo(schemaName, tableName);
                if (tableMetaInfo != null) {
                    // HDFS-side sizes and access/modification timestamps.
                    addHdfsInfo(tableMetaInfo);
                    // Stamp the snapshot date and load time.
                    tableMetaInfo.setAssessDate(assessDate);
                    tableMetaInfo.setCreateTime(new Date());

                    tableMetaInfoList.add(tableMetaInfo);
                }
            }
            // Idempotency: drop any previous snapshot for this assess date before inserting.
            remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate));
            saveBatch(tableMetaInfoList, 500);
            tableMetaInfoExtraService.initTableMetaExtra(tableMetaInfoList);
        } catch (MetaException e) {
            // Preserve the cause (was printStackTrace + bare-message RuntimeException).
            throw new RuntimeException("hive查询表出错", e);
        }
    }

    /**
     * Builds a {@link TableMetaInfo} from the Hive metastore for one table.
     *
     * @return the populated bean, or {@code null} if the table cannot be fetched
     *         (the caller skips null entries so one bad table does not abort the scan)
     */
    private TableMetaInfo addHiveInfo(String schemaName, String tableName) {
        try {
            Table table = hiveMetaStoreClient.getTable(schemaName, tableName);

            StorageDescriptor sd = table.getSd();
            Map<String, String> parametersMap = table.getParameters();
            // Keep only the fields we care about when serializing column descriptors.
            PropertyPreFilters.MySimplePropertyPreFilter preFilter = new PropertyPreFilters().addFilter("comment", "name", "type");

            return TableMetaInfo.builder()
                    .tableName(tableName)
                    .schemaName(schemaName)
                    .colNameJson(JSON.toJSONString(sd.getCols(), preFilter))
                    .partitionColNameJson(JSON.toJSONString(table.getPartitionKeys(), preFilter))
                    .tableFsOwner(table.getOwner())
                    .tableParametersJson(JSON.toJSONString(parametersMap))
                    .tableComment(parametersMap.get("comment"))
                    .tableFsPath(sd.getLocation())
                    .tableInputFormat(sd.getInputFormat())
                    .tableOutputFormat(sd.getOutputFormat())
                    // NOTE(review): toJSONString on a plain String stores it wrapped in
                    // quotes; kept as-is for byte-compatibility with existing rows.
                    .tableRowFormatSerde(JSON.toJSONString(sd.getSerdeInfo().getSerializationLib()))
                    // Hive stores createTime in seconds; convert to millis for formatting.
                    .tableCreateTime(DateFormatUtils.format(new Date(table.getCreateTime() * 1000L), "yyyy-MM-dd HH:mm:ss"))
                    .tableType(table.getTableType())
                    .tableBucketColsJson(JSON.toJSONString(sd.getBucketCols()))
                    // Fixed: was getBucketColsSize() (count of bucketing COLUMNS);
                    // numBuckets is the actual bucket count. Confirm downstream consumers.
                    .tableBucketNum((long) sd.getNumBuckets())
                    .tableSortColsJson(JSON.toJSONString(sd.getSortCols()))
                    // Sizes start at 0 and are accumulated later by addFileInfoRec.
                    .tableSize(0L)
                    .tableTotalSize(0L)
                    .build();
        } catch (TException e) {
            // Fixed: the RuntimeException used to be constructed but never thrown,
            // hiding the failure completely (and its format string used '&s').
            // Keep the skip-on-failure contract (return null) but log the error.
            System.err.println(String.format("hiveMetaStoreClient寻找%s.%s表失败", schemaName, tableName));
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Walks the table's HDFS directory tree, accumulating file sizes and the
     * latest modification/access times into {@code tableMetaInfo}, and records
     * filesystem capacity/usage. I/O failures are logged and leave the bean
     * partially filled (best-effort, matching the original behavior).
     */
    private void addHdfsInfo(TableMetaInfo tableMetaInfo) {
        try {
            FileSystem fileSystem = FileSystem.get(new URI(tableMetaInfo.getTableFsPath()), new Configuration(), tableMetaInfo.getTableFsOwner());
            FileStatus[] tableFilesStatus = fileSystem.listStatus(new Path(tableMetaInfo.getTableFsPath()));

            // Recursively sum file sizes and track max modify/access times.
            addFileInfoRec(tableFilesStatus, fileSystem, tableMetaInfo);

            // Hoisted: getStatus() is an RPC — was called three times.
            FsStatus fsStatus = fileSystem.getStatus();
            tableMetaInfo.setFsCapcitySize(fsStatus.getCapacity());
            tableMetaInfo.setFsUsedSize(fsStatus.getUsed());
            tableMetaInfo.setFsRemainSize(fsStatus.getRemaining());
        } catch (IOException | URISyntaxException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Depth-first walk: directories recurse; files add their length (and
     * replicated length) to the running totals and bump the max
     * modification/access timestamps on {@code tableMetaInfo}.
     */
    private void addFileInfoRec(FileStatus[] tableFilesStatus, FileSystem fileSystem, TableMetaInfo tableMetaInfo) {
        for (FileStatus filesStatus : tableFilesStatus) {
            if (filesStatus.isDirectory()) {
                try {
                    FileStatus[] subFileStatus = fileSystem.listStatus(filesStatus.getPath());
                    addFileInfoRec(subFileStatus, fileSystem, tableMetaInfo);
                } catch (IOException e) {
                    // Best-effort: an unreadable subdirectory does not abort the walk.
                    e.printStackTrace();
                }
            } else {
                // Logical size (single copy).
                long tableNewSize = tableMetaInfo.getTableSize() + filesStatus.getLen();
                tableMetaInfo.setTableSize(tableNewSize);

                // Physical size including HDFS replication.
                long tableNewTotalSize = tableMetaInfo.getTableTotalSize() + filesStatus.getLen() * filesStatus.getReplication();
                tableMetaInfo.setTableTotalSize(tableNewTotalSize);

                // Track the latest modification time seen so far.
                Date modificationTime = new Date(filesStatus.getModificationTime());
                Date tableLastModifyTime = tableMetaInfo.getTableLastModifyTime();
                if (tableLastModifyTime == null || tableLastModifyTime.compareTo(modificationTime) < 0) {
                    tableMetaInfo.setTableLastModifyTime(modificationTime);
                }

                // Track the latest access time seen so far.
                Date accessTime = new Date(filesStatus.getAccessTime());
                Date tableLastAccessTime = tableMetaInfo.getTableLastAccessTime();
                if (tableLastAccessTime == null || tableLastAccessTime.compareTo(accessTime) < 0) {
                    tableMetaInfo.setTableLastAccessTime(accessTime);
                }
            }
        }
    }

    /**
     * Appends the optional LIKE filters shared by the list and count queries.
     * Values pass through {@link SqlUtil#filterUnsafeSql}; NOTE(review):
     * string-built SQL remains — prefer parameterized mapper queries if possible.
     */
    private void appendQueryFilters(StringBuilder sqlSb, TableMetaInfoForQuery tableMetaInfoForQuery) {
        if (tableMetaInfoForQuery.getSchemaName() != null) {
            sqlSb.append("AND t1.schema_name LIKE '%" + SqlUtil.filterUnsafeSql(tableMetaInfoForQuery.getSchemaName()) + "%'\n");
        }
        if (tableMetaInfoForQuery.getTableName() != null) {
            sqlSb.append("AND t1.table_name LIKE '%" + SqlUtil.filterUnsafeSql(tableMetaInfoForQuery.getTableName()) + "%'\n");
        }
        if (tableMetaInfoForQuery.getDwLevel() != null) {
            sqlSb.append("AND t2.dw_level LIKE '%" + SqlUtil.filterUnsafeSql(tableMetaInfoForQuery.getDwLevel()) + "%'\n");
        }
    }

    /**
     * Pages through the latest snapshot of each table (joined with its extra
     * info), applying optional schema/table/dw-level LIKE filters.
     *
     * @param tableMetaInfoForQuery filters plus pageNo/pageSize
     * @return one page of matching rows
     */
    @Override
    public List<TableMetaInfoVO> getTableMetaInfoList(TableMetaInfoForQuery tableMetaInfoForQuery) {
        int startOffset = (tableMetaInfoForQuery.getPageNo() - 1) * tableMetaInfoForQuery.getPageSize();
        // Correlated subquery pins each table to its most recent assess_date.
        StringBuilder sqlSb = new StringBuilder("SELECT\n" +
                        "\tt1.id,\n" +
                        "\tt1.table_name,\n" +
                        "\tt1.schema_name,\n" +
                        "\tt1.table_size,\n" +
                        "\tt1.table_total_size,\n" +
                        "\tt1.table_comment,\n" +
                        "\tt2.tec_owner_user_name,\n" +
                        "\tt2.busi_owner_user_name,\n" +
                        "\tt1.table_last_modify_time,\n" +
                        "\tt1.table_last_access_time\n" +
                        "FROM\n" +
                        "\ttable_meta_info t1\n" +
                        "JOIN table_meta_info_extra t2 ON t1.schema_name = t2.schema_name\n" +
                        "AND t1.table_name = t2.table_name\n" +
                        "WHERE\n" +
                        "\tt1.assess_date = (\n" +
                        "\tSELECT\n" +
                        "\t\tmax(assess_date)\n" +
                        "\tFROM\n" +
                        "\t\ttable_meta_info t3\n" +
                        "\tWHERE\n" +
                        "\t\tt1.schema_name = t3.schema_name\n" +
                        "\tAND t1.table_name = t3.table_name)\n"
                        );
        // Shared with getTableMetaInfoCount so both queries stay consistent.
        appendQueryFilters(sqlSb, tableMetaInfoForQuery);
        sqlSb.append(String.format("LIMIT %d,%d", startOffset, tableMetaInfoForQuery.getPageSize()));

        return this.baseMapper.selectTableMetaListPage(sqlSb.toString());
    }

    /**
     * Counts the rows that {@link #getTableMetaInfoList(TableMetaInfoForQuery)}
     * would match (same join, same latest-snapshot subquery, same filters),
     * for pagination totals.
     */
    @Override
    public Integer getTableMetaInfoCount(TableMetaInfoForQuery tableMetaInfoForQuery) {
        StringBuilder sqlSb = new StringBuilder("SELECT\n" +
                "\tcount(1) cnt\n" +
                "FROM\n" +
                "\ttable_meta_info t1\n" +
                "JOIN table_meta_info_extra t2 ON t1.schema_name = t2.schema_name\n" +
                "AND t1.table_name = t2.table_name\n" +
                "WHERE\n" +
                "\tt1.assess_date = (\n" +
                "\tSELECT\n" +
                "\t\tmax(assess_date)\n" +
                "\tFROM\n" +
                "\t\ttable_meta_info t3\n" +
                "\tWHERE\n" +
                "\t\tt1.schema_name = t3.schema_name\n" +
                "\tAND t1.table_name = t3.table_name)\n"
        );
        // Shared with getTableMetaInfoList so both queries stay consistent.
        appendQueryFilters(sqlSb, tableMetaInfoForQuery);
        return this.baseMapper.selectTableMetaCount(sqlSb.toString());
    }

    /**
     * Loads a table's metadata row by id together with its extra info
     * (tech/business owners etc.), substituting an empty extra bean when none exists.
     *
     * @param tableMetaInfoId primary key of table_meta_info
     * @return the combined bean, or {@code null} if the id is unknown
     */
    @Override
    public TableMetaInfo getTableMetaInfoAll(Long tableMetaInfoId) {
        TableMetaInfo tableMetaInfo = this.getById(tableMetaInfoId);
        if (tableMetaInfo == null) {
            // Fixed: an unknown id used to NPE on getSchemaName(); return null explicitly.
            return null;
        }
        TableMetaInfoExtra tableMetaInfoExtra = tableMetaInfoExtraService.getOne(new QueryWrapper<TableMetaInfoExtra>()
                .eq("schema_name", tableMetaInfo.getSchemaName())
                .eq("table_name", tableMetaInfo.getTableName()));
        if (tableMetaInfoExtra == null) {
            // Avoid propagating null to the view layer.
            tableMetaInfoExtra = TableMetaInfoExtra.builder().build();
        }
        tableMetaInfo.setTableMetaInfoExtra(tableMetaInfoExtra);
        return tableMetaInfo;
    }

    /**
     * Returns every table's latest-snapshot row (delegates to the mapper's
     * last-date query).
     */
    @Override
    public List<TableMetaInfo> getTableMetaInfoList() {
        return baseMapper.getTableMetaInfoListLastDt();
    }


}
