package com.hkbigdata.datagovernance.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SimplePropertyPreFilter;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.hkbigdata.datagovernance.meta.bean.TableMetaInfo;
import com.hkbigdata.datagovernance.meta.bean.TableMetaInfoForQuery;
import com.hkbigdata.datagovernance.meta.bean.TableMetaInfoVO;
import com.hkbigdata.datagovernance.meta.mapper.TableMetaInfoMapper;
import com.hkbigdata.datagovernance.meta.service.TableMetaInfoExtraService;
import com.hkbigdata.datagovernance.meta.service.TableMetaInfoService;
import com.hkbigdata.datagovernance.meta.util.SqlUtil;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.ibatis.annotations.ResultMap;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.xml.crypto.Data;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * <p>
 * 元数据表 服务实现类
 * </p>
 *
 * @author 
 * @since 2024-01-04
 */
@Service
@DS("mysqlWriting")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /** Thrift URI(s) of the Hive metastore, e.g. {@code thrift://host:9083}. */
    @Value("${hive.meta-server.url}")
    private String hiveMetaServerUrl;

    @Autowired
    TableMetaInfoMapper tableMetaInfoMapper;

    @Autowired
    TableMetaInfoExtraService tableMetaInfoExtraService;

    /**
     * Rebuilds the table-metadata snapshot for one assessment date.
     * Any rows already stored for that date are removed first, so re-running
     * the job for the same date is idempotent.
     *
     * @param assessDate assessment date the snapshot belongs to
     * @param schemaName Hive schema (database) whose tables are scanned
     */
    @Override
    public void initMetaInfo(String assessDate, String schemaName) {
        // Reset: delete the rows already written for this assessment date.
        remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate));

        // Extract metadata from the Hive metastore and the backing HDFS paths.
        List<TableMetaInfo> tableMetaInfoList = extractMetaInfo(assessDate, schemaName);

        // Persist in batches of 500 rows per insert.
        saveOrUpdateBatch(tableMetaInfoList, 500);

        // Generate the companion "extra" records for the freshly extracted rows.
        tableMetaInfoExtraService.genExtraListByMetaList(tableMetaInfoList);
    }

    /**
     * Pages through the latest metadata snapshot, optionally filtered by
     * schema name, table name and DW level (all matched with LIKE '%...%').
     *
     * @param tableMetaInfoForQuery filter + pagination parameters (pageNo is 1-based)
     * @return one page of metadata view objects
     */
    @Override
    public List<TableMetaInfoVO> getTableMetaInfoList(TableMetaInfoForQuery tableMetaInfoForQuery) {
        // 1. Build the SQL. Only the most recent assess_date is queried.
        StringBuilder sqlStrBuilder = new StringBuilder();
        sqlStrBuilder.append("select tm.id, tm.table_name, tm.schema_name, table_comment, table_size, table_total_size, tec_owner_user_name, busi_owner_user_name, ");
        sqlStrBuilder.append("table_last_access_time, table_last_modify_time ");
        sqlStrBuilder.append("from table_meta_info tm ");
        sqlStrBuilder.append("join table_meta_info_extra te ");
        sqlStrBuilder.append("on tm.table_name = te.table_name and tm.schema_name = te.schema_name ");
        sqlStrBuilder.append("where assess_date = (select max(assess_date) from table_meta_info)");

        // Optional filters (shared with getTableMetaInfoCount so both stay in sync).
        appendFilterConditions(sqlStrBuilder, tableMetaInfoForQuery);

        // Pagination: offset of the first row on the requested (1-based) page.
        Integer from = (tableMetaInfoForQuery.getPageNo() - 1) * tableMetaInfoForQuery.getPageSize();
        sqlStrBuilder.append(" limit ").append(from).append(",").append(tableMetaInfoForQuery.getPageSize());

        // 2. Hand the SQL to the mapper for execution.
        return tableMetaInfoMapper.selectTableMetaInfoList(sqlStrBuilder.toString());
    }

    /**
     * Counts the rows of the latest snapshot matching the same filters as
     * {@link #getTableMetaInfoList(TableMetaInfoForQuery)} (for pagination totals).
     *
     * @param tableMetaInfoForQuery filter parameters; pagination fields are ignored
     * @return total matching row count
     */
    @Override
    public Integer getTableMetaInfoCount(TableMetaInfoForQuery tableMetaInfoForQuery) {
        StringBuilder sqlStrBuilder = new StringBuilder();
        sqlStrBuilder.append("select count(*) ");
        sqlStrBuilder.append("from table_meta_info tm ");
        sqlStrBuilder.append("join table_meta_info_extra te ");
        sqlStrBuilder.append("on tm.table_name = te.table_name and tm.schema_name = te.schema_name ");
        sqlStrBuilder.append("where assess_date = (select max(assess_date) from table_meta_info)");

        // Same optional filters as the list query.
        appendFilterConditions(sqlStrBuilder, tableMetaInfoForQuery);

        return tableMetaInfoMapper.selectgetTableMetaInfoCount(sqlStrBuilder.toString());
    }

    /**
     * Appends the optional LIKE filters (schema name, table name, DW level) to
     * the query being built. Values are sanitized with
     * {@link SqlUtil#filterUnsafeSql(String)}; a mapper-level bound parameter
     * would be preferable if the mapper API supported it.
     *
     * @param sql   query under construction; must end at a point where " and ..." is valid
     * @param query filter values, each skipped when null
     */
    private void appendFilterConditions(StringBuilder sql, TableMetaInfoForQuery query) {
        if (query.getSchemaName() != null) {
            sql.append(" and tm.schema_name like '%").append(SqlUtil.filterUnsafeSql(query.getSchemaName())).append("%'");
        }
        if (query.getTableName() != null) {
            sql.append(" and tm.table_name like '%").append(SqlUtil.filterUnsafeSql(query.getTableName())).append("%'");
        }
        if (query.getDwLevel() != null) {
            sql.append(" and te.dw_level like '%").append(SqlUtil.filterUnsafeSql(query.getDwLevel())).append("%'");
        }
    }

    /**
     * Extracts metadata for every table of the given schema: Hive metastore
     * attributes plus HDFS size/time statistics.
     *
     * <p>A single metastore client is opened for the whole extraction and
     * closed in a finally block (the previous implementation opened a fresh,
     * never-closed client per table, leaking Thrift connections).
     *
     * @param assessDate assessment date stamped onto every record
     * @param schemaName Hive schema to scan
     * @return one {@link TableMetaInfo} per table
     * @throws RuntimeException wrapping any metastore communication failure
     */
    private List<TableMetaInfo> extractMetaInfo(String assessDate, String schemaName) {
        List<TableMetaInfo> metaInfoList;

        IMetaStoreClient hiveClient = getHiveClient();
        try {
            List<String> allTableNameList = hiveClient.getAllTables(schemaName);
            metaInfoList = new ArrayList<>(allTableNameList.size());

            for (String tableName : allTableNameList) {
                // Hive metastore attributes (columns, location, formats, ...).
                TableMetaInfo tableMetaInfo = getTableMetaInfoFromHive(hiveClient, tableName, schemaName);

                // HDFS statistics (sizes, last access/modify times).
                getTableMetaInfoFromHdfs(tableMetaInfo);

                tableMetaInfo.setAssessDate(assessDate);
                tableMetaInfo.setCreateTime(new Date());

                metaInfoList.add(tableMetaInfo);
            }
        } catch (TException e) {
            throw new RuntimeException("Failed to extract Hive metadata for schema " + schemaName, e);
        } finally {
            hiveClient.close();
        }

        return metaInfoList;
    }

    /**
     * Fills in the HDFS-derived fields of the record: aggregated table size,
     * replicated size, last access/modify times, and file-system capacity.
     *
     * @param tableMetaInfo record whose tableFsPath/tableFsOwner are already set
     * @throws RuntimeException wrapping any HDFS access failure
     */
    private void getTableMetaInfoFromHdfs(TableMetaInfo tableMetaInfo) {
        try {
            // Connect as the table's HDFS owner so permission checks match the owning user.
            FileSystem fs = FileSystem.get(new URI(tableMetaInfo.getTableFsPath()), new Configuration(), tableMetaInfo.getTableFsOwner());
            // NOTE(review): fs is deliberately not closed — FileSystem.get returns a
            // cached instance shared per (URI, user); closing it would break other users.

            // Files and directories directly under the table location.
            FileStatus[] fileStatuses = fs.listStatus(new Path(tableMetaInfo.getTableFsPath()));

            // Aggregate size/time info, recursing into subdirectories (partitions).
            addFileInfo(tableMetaInfo, fileStatuses, fs);

            // Fetch the file-system status once; each getStatus() call is a NameNode RPC
            // (the original code issued three).
            FsStatus fsStatus = fs.getStatus();
            tableMetaInfo.setFsCapcitySize(fsStatus.getCapacity());
            tableMetaInfo.setFsUsedSize(fsStatus.getUsed());
            tableMetaInfo.setFsRemainSize(fsStatus.getRemaining());

        } catch (Exception e) {
            throw new RuntimeException("Failed to read HDFS metadata for path " + tableMetaInfo.getTableFsPath(), e);
        }
    }

    /**
     * Recursively accumulates file statistics into the record: single-copy
     * size, replicated size, and the newest access/modification timestamps
     * across all files under the table location.
     *
     * @param tableMetaInfo record being aggregated into (tableSize/tableTotalSize
     *                      are assumed non-null defaults — TODO confirm in bean)
     * @param fileStatuses  entries of the current directory level
     * @param fs            file system used to list subdirectories
     */
    private void addFileInfo(TableMetaInfo tableMetaInfo, FileStatus[] fileStatuses, FileSystem fs) {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                short replicationNum = fileStatus.getReplication();
                long fileSize = fileStatus.getLen();
                long curAccessTime = fileStatus.getAccessTime();
                long curModificationTime = fileStatus.getModificationTime();

                // Logical data size (one copy).
                tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + fileSize);

                // Physical size including all replicas.
                tableMetaInfo.setTableTotalSize(tableMetaInfo.getTableTotalSize() + fileSize * replicationNum);

                // Keep the newest modification time seen so far.
                Date curModifiDate = new Date(curModificationTime);
                if (tableMetaInfo.getTableLastModifyTime() == null
                        || tableMetaInfo.getTableLastModifyTime().getTime() < curModificationTime) {
                    tableMetaInfo.setTableLastModifyTime(curModifiDate);
                }

                // Keep the newest access time seen so far (same rule as above).
                Date curAccessDate = new Date(curAccessTime);
                if (tableMetaInfo.getTableLastAccessTime() == null
                        || tableMetaInfo.getTableLastAccessTime().getTime() < curAccessTime) {
                    tableMetaInfo.setTableLastAccessTime(curAccessDate);
                }
            } else {
                // Directory (e.g. a partition): recurse into it.
                FileStatus[] subfileStatus;
                try {
                    subfileStatus = fs.listStatus(fileStatus.getPath());
                } catch (IOException e) {
                    throw new RuntimeException("Failed to list HDFS path " + fileStatus.getPath(), e);
                }
                addFileInfo(tableMetaInfo, subfileStatus, fs);
            }
        }
    }

    /**
     * Builds a {@link TableMetaInfo} from the Hive metastore definition of one table.
     *
     * @param hiveClient open metastore client supplied by the caller (the caller
     *                   owns its lifecycle; previously a fresh client was leaked per table)
     * @param tableName  table to describe
     * @param schemaName schema the table belongs to
     * @return populated record (HDFS fields still unset)
     * @throws RuntimeException wrapping any metastore communication failure
     */
    private TableMetaInfo getTableMetaInfoFromHive(IMetaStoreClient hiveClient, String tableName, String schemaName) {
        TableMetaInfo tableMetaInfo;

        try {
            Table table = hiveClient.getTable(schemaName, tableName);
            tableMetaInfo = new TableMetaInfo();

            tableMetaInfo.setTableName(tableName);
            tableMetaInfo.setSchemaName(schemaName);

            // Serialize only these FieldSchema properties. Fixed: the original
            // listed "common" (a typo matching nothing) instead of "comment",
            // silently dropping column comments from the JSON.
            SimplePropertyPreFilter simplePropertyPreFilter = new SimplePropertyPreFilter("name", "type", "comment");

            // Regular columns.
            tableMetaInfo.setColNameJson(JSON.toJSONString(table.getSd().getCols(), simplePropertyPreFilter));

            // Partition columns.
            tableMetaInfo.setPartitionColNameJson(JSON.toJSONString(table.getPartitionKeys(), simplePropertyPreFilter));

            // HDFS owner of the table location.
            tableMetaInfo.setTableFsOwner(table.getOwner());

            // Table parameters; dropped (null) when they exceed the DB column size.
            String tableParameterJson = JSON.toJSONString(table.getParameters());
            int maxLength = 2000;  // maximum length the target column can hold
            if (tableParameterJson.length() > maxLength) {
                tableMetaInfo.setTableParametersJson(null);
            } else {
                tableMetaInfo.setTableParametersJson(tableParameterJson);
            }

            // Table comment lives inside the parameters map.
            tableMetaInfo.setTableComment(JSON.parseObject(tableParameterJson, JSONObject.class).getString("comment"));

            // Storage descriptor attributes.
            tableMetaInfo.setTableFsPath(table.getSd().getLocation());
            tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
            tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());
            tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());

            // Creation time: metastore stores epoch seconds. Fixed pattern:
            // "yyyy-mm-dd" used mm (minutes) where the month MM was intended.
            String tableCreateTime = DateFormatUtils.format(new Date(table.getCreateTime() * 1000L), "yyyy-MM-dd HH:mm:ss");
            tableMetaInfo.setTableCreateTime(tableCreateTime);

            tableMetaInfo.setTableType(table.getTableType());

            // Bucketing info, only present for bucketed tables.
            if (table.getSd().getBucketCols().size() > 0) {
                tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(table.getSd().getBucketCols()));
                // "+ 0L" widens the int bucket count to the Long the bean expects.
                tableMetaInfo.setTableBucketNum(table.getSd().getNumBuckets() + 0L);
                tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols()));
            }

        } catch (TException e) {
            throw new RuntimeException("Failed to read Hive metadata for " + schemaName + "." + tableName, e);
        }

        return tableMetaInfo;
    }

    /**
     * Opens a new Hive metastore client against the configured Thrift URI.
     * Callers are responsible for closing the returned client.
     *
     * @throws RuntimeException wrapping connection/configuration failures
     */
    private IMetaStoreClient getHiveClient() {
        HiveConf hiveConf = new HiveConf();
        MetastoreConf.setVar(hiveConf, MetastoreConf.ConfVars.THRIFT_URIS, hiveMetaServerUrl);
        try {
            return new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            throw new RuntimeException("Failed to connect to Hive metastore at " + hiveMetaServerUrl, e);
        }
    }

    /**
     * Returns all metadata rows of the most recent snapshot.
     * (The previous @ResultMap annotation was removed: it is a MyBatis mapper
     * annotation and has no effect on a service method.)
     */
    public List<TableMetaInfo> getAllTableMeteInfo() {
        return baseMapper.getTableMetaInfoListLastDt();
    }

}
