package com.atguigu.dga.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.support.spring.PropertyPreFilters;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.meta.bean.TableMetaInfoForQuery;
import com.atguigu.dga.meta.bean.TableMetaInfoVO;
import com.atguigu.dga.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga.meta.service.TableMetaInfoExtraService;
import com.atguigu.dga.meta.service.TableMetaInfoService;
import com.atguigu.dga.meta.util.SqlUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * <p>
 * Metadata-table service implementation: extracts per-table metadata from the
 * Hive metastore and HDFS, persists a dated snapshot to MySQL, and serves
 * paged queries over the most recent snapshot.
 * </p>
 *
 * @author zhangjunyi
 * @since 2023-06-03
 */
@Service
@DS("dga")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /** Thrift URI of the Hive metastore server (e.g. thrift://host:9083). */
    @Value("${hive.meta-server.url}")
    String hiveMetaServerUrl;

    @Autowired
    TableMetaInfoExtraService tableMetaInfoExtraService;

    @Autowired
    TableMetaInfoMapper tableMetaInfoMapper;

    /**
     * (Re)builds the metadata snapshot for one assessment date.
     *
     * @param assessDate   assessment date the snapshot is generated for
     * @param dataBaseName Hive database whose tables are scanned
     */
    @Override
    public void initMetaInfo(String assessDate, String dataBaseName) {
        // 0. On a re-run, first drop whatever was already generated for this date.
        // NOTE(review): this deletes the date's rows for ALL schemas, not only
        // dataBaseName — confirm that is intended before multi-database use.
        remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate));

        // 1. Pull Hive + HDFS metadata for every table in the database.
        List<TableMetaInfo> tableMetaInfoList = extractMeta(assessDate, dataBaseName);
        // 2. Persist the metadata rows to MySQL.
        saveOrUpdateBatch(tableMetaInfoList);
        // 3. Initialize the auxiliary-information table from the same list.
        tableMetaInfoExtraService.genExtraListByMetaList(tableMetaInfoList);
    }

    /**
     * Returns one page of the latest metadata snapshot, joined with its extra
     * info, optionally filtered by table name, schema name and DW level
     * (each applied as a {@code LIKE '%value%'} filter when non-null).
     *
     * Target query shape:
     * <pre>
     * select tm.id, tm.table_name, tm.schema_name, ...
     * from table_meta_info tm
     * join table_meta_info_extra te
     *   on tm.table_name = te.table_name and tm.schema_name = te.schema_name
     * where assess_date = (select max(assess_date) from table_meta_info)
     *   [and tm.table_name like ...] [and tm.schema_name like ...] [and dw_level like ...]
     * limit offset, pageSize
     * </pre>
     *
     * @param tableMetaInfoForQuery filters plus 1-based page number and page size
     * @return the requested page of metadata view objects
     */
    @Override
    public List<TableMetaInfoVO> getTableMetaInfoList(TableMetaInfoForQuery tableMetaInfoForQuery) {
        // The SQL is assembled dynamically; all user-supplied fragments pass
        // through SqlUtil.filterUnsafeSql because the statement is concatenated,
        // not parameterized.
        StringBuilder sql = new StringBuilder();
        sql.append("select tm.id ,tm.table_name,tm.schema_name,table_comment,table_size,table_total_size,tec_owner_user_name,busi_owner_user_name, table_last_access_time,table_last_modify_time");
        sql.append(" from table_meta_info tm\n" +
                "    join table_meta_info_extra te\n" +
                "    on tm.table_name=te.table_name and tm.schema_name=te.schema_name");
        // Only the most recent snapshot is queried.
        sql.append(" where assess_date=(select max(assess_date) from table_meta_info)");

        appendFilterConditions(sql, tableMetaInfoForQuery);

        // Offset of the first row of the requested page (pageNo is 1-based).
        Integer from = (tableMetaInfoForQuery.getPageNo() - 1) * tableMetaInfoForQuery.getPageSize();
        sql.append(" limit " + from + "," + SqlUtil.filterUnsafeSql(tableMetaInfoForQuery.getPageSize() + ""));

        return tableMetaInfoMapper.getTableMetaInfoList(sql.toString());
    }

    /**
     * Counts the rows of the latest snapshot matching the same filters as
     * {@link #getTableMetaInfoList}, for pagination.
     *
     * @param tableMetaInfoForQuery optional table-name / schema-name / DW-level filters
     * @return total matching row count
     */
    @Override
    public Integer getTableMetaInfoCount(TableMetaInfoForQuery tableMetaInfoForQuery) {
        StringBuilder sql = new StringBuilder();
        sql.append("select count(*)");
        sql.append(" from table_meta_info tm\n" +
                "    join table_meta_info_extra te\n" +
                "    on tm.table_name=te.table_name and tm.schema_name=te.schema_name");
        sql.append(" where assess_date=(select max(assess_date) from table_meta_info)");

        appendFilterConditions(sql, tableMetaInfoForQuery);

        return tableMetaInfoMapper.getTableMetaInfoCount(sql.toString());
    }

    /**
     * Appends the optional LIKE filters shared by the list and count queries.
     * Each fragment starts with a leading space so the statement stays valid
     * regardless of which (if any) filters are present.
     */
    private void appendFilterConditions(StringBuilder sql, TableMetaInfoForQuery query) {
        if (query.getTableName() != null) {
            sql.append(" and tm.table_name like '%" + SqlUtil.filterUnsafeSql(query.getTableName()) + "%'");
        }
        if (query.getSchemaName() != null) {
            sql.append(" and tm.schema_name like '%" + SqlUtil.filterUnsafeSql(query.getSchemaName()) + "%'");
        }
        if (query.getDwLevel() != null) {
            sql.append(" and dw_level like '%" + SqlUtil.filterUnsafeSql(query.getDwLevel()) + "%'");
        }
    }

    /**
     * Returns every metadata row, unfiltered.
     */
    @Override
    public List<TableMetaInfo> getTableMetaInfoAllList() {
        return tableMetaInfoMapper.selectTableMetaInfoList();
    }

    /**
     * Extracts Hive + HDFS metadata for every table of the given database.
     * A single metastore client is opened for the whole scan and closed when
     * done (the previous per-table client was a thrift-connection leak).
     *
     * @param assessDate   assessment date stamped on every produced row
     * @param dataBaseName Hive database to scan
     * @return one {@link TableMetaInfo} per table in the database
     */
    private List<TableMetaInfo> extractMeta(String assessDate, String dataBaseName) {
        // One client for the whole extraction; closed in the finally block.
        IMetaStoreClient hiveClient = getHiveClient();
        try {
            // 1. List every table name of the database.
            List<String> allTablesName = hiveClient.getAllTables(dataBaseName);

            // Pre-sized to avoid ArrayList growth during the scan.
            List<TableMetaInfo> tableMetaInfoList = new ArrayList<>(allTablesName.size());

            // 2. For each table, collect Hive metadata then enrich with HDFS stats.
            for (String tableName : allTablesName) {
                // 3. Hive-side metadata (no HDFS figures yet).
                TableMetaInfo tableMetaInfo = addMetaInfoFromHive(hiveClient, dataBaseName, tableName);
                // 4. HDFS-side metadata (sizes, last access/modify times).
                addHDFSMetaInfo(tableMetaInfo);

                // Assessment date this row belongs to.
                tableMetaInfo.setAssessDate(assessDate);
                // Creation time of this metadata row itself.
                tableMetaInfo.setCreateTime(new Date());

                tableMetaInfoList.add(tableMetaInfo);
            }
            return tableMetaInfoList;
        } catch (TException e) {
            throw new RuntimeException("Failed to extract Hive metadata for database " + dataBaseName, e);
        } finally {
            hiveClient.close();
        }
    }

    /**
     * Enriches a {@link TableMetaInfo} with HDFS statistics: aggregated file
     * sizes, last access/modify times, and filesystem capacity figures.
     *
     * @param tableMetaInfo row whose tableFsPath/tableFsOwner are already set
     */
    private void addHDFSMetaInfo(TableMetaInfo tableMetaInfo) {
        try {
            // FileSystem.get caches instances per (uri, user), so repeated
            // calls reuse one connection; it is intentionally not closed here.
            FileSystem fs = FileSystem.get(new URI(tableMetaInfo.getTableFsPath()), new Configuration(), tableMetaInfo.getTableFsOwner());

            // Direct children of the table's storage location.
            FileStatus[] fileStatuses = fs.listStatus(new Path(tableMetaInfo.getTableFsPath()));

            // A table directory may contain both files and (partition) subdirectories;
            // recurse so every file under the location is counted.
            addFileInfo(tableMetaInfo, fileStatuses, fs);

            // Filesystem-wide figures; fetched once — each getStatus() is a
            // NameNode round-trip.
            org.apache.hadoop.fs.FsStatus fsStatus = fs.getStatus();
            tableMetaInfo.setFsCapcitySize(fsStatus.getCapacity());
            tableMetaInfo.setFsUsedSize(fsStatus.getUsed());
            tableMetaInfo.setFsRemainSize(fsStatus.getRemaining());
        } catch (Exception e) {
            throw new RuntimeException("Failed to read HDFS metadata for path " + tableMetaInfo.getTableFsPath(), e);
        }
    }

    /**
     * Recursively folds the size and timestamp information of every file under
     * the given statuses into the {@link TableMetaInfo} accumulator.
     *
     * NOTE(review): assumes tableSize/tableTotalSize start at 0 in the bean —
     * confirm the bean's defaults, otherwise the first addition would NPE.
     *
     * @param tableMetaInfo accumulator being enriched
     * @param fileStatuses  entries of the directory level being processed
     * @param fs            filesystem used to list subdirectories
     */
    private void addFileInfo(TableMetaInfo tableMetaInfo, FileStatus[] fileStatuses, FileSystem fs) throws IOException {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                long fileSize = fileStatus.getLen();                       // size of this file
                short repNum = fileStatus.getReplication();                // replication factor of this file
                long modificationTime = fileStatus.getModificationTime(); // last modification of this file
                long accessTime = fileStatus.getAccessTime();              // last access of this file

                // Logical table size (one copy of the data).
                tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + fileSize);

                // Physical size across replicas; computed per file because the
                // replication factor can differ between files.
                tableMetaInfo.setTableTotalSize(tableMetaInfo.getTableTotalSize() + fileSize * repNum);

                Date modifDate = new Date(modificationTime);
                Date accessDate = new Date(accessTime);

                // Keep the newest modification time seen so far.
                if (tableMetaInfo.getTableLastModifyTime() == null
                        || tableMetaInfo.getTableLastModifyTime().getTime() < modificationTime) {
                    tableMetaInfo.setTableLastModifyTime(modifDate);
                }

                // Keep the newest access time seen so far.
                if (tableMetaInfo.getTableLastAccessTime() == null
                        || tableMetaInfo.getTableLastAccessTime().getTime() < accessTime) {
                    tableMetaInfo.setTableLastAccessTime(accessDate);
                }
            } else {
                // Directory: descend until only files remain.
                FileStatus[] subFileStatuses = fs.listStatus(fileStatus.getPath());
                addFileInfo(tableMetaInfo, subFileStatuses, fs);
            }
        }
    }

    /**
     * Builds a {@link TableMetaInfo} from the Hive metastore definition of one
     * table (columns, location, formats, owner, parameters, bucketing).
     *
     * @param hiveClient   open metastore client (owned and closed by the caller)
     * @param dataBaseName database (schema) of the table
     * @param tableName    table to describe
     * @return a new row populated with Hive-side metadata only
     */
    private TableMetaInfo addMetaInfoFromHive(IMetaStoreClient hiveClient, String dataBaseName, String tableName) {
        TableMetaInfo tableMetaInfo = new TableMetaInfo();
        try {
            Table table = hiveClient.getTable(dataBaseName, tableName);
            // Table and schema identity.
            tableMetaInfo.setTableName(tableName);
            tableMetaInfo.setSchemaName(dataBaseName);

            // Column metadata, serialized as JSON restricted to name/type/comment.
            PropertyPreFilters.MySimplePropertyPreFilter mySimplePropertyPreFilter = new PropertyPreFilters().addFilter("name", "type", "comment");
            tableMetaInfo.setColNameJson(JSON.toJSONString(table.getSd().getCols(), mySimplePropertyPreFilter));
            // Partition-key columns as JSON (same field filter).
            tableMetaInfo.setPartitionColNameJson(JSON.toJSONString(table.getPartitionKeys(), mySimplePropertyPreFilter));
            // HDFS owner of the table.
            tableMetaInfo.setTableFsOwner(table.getOwner());

            // Raw table parameters as JSON.
            tableMetaInfo.setTableParametersJson(JSON.toJSONString(table.getParameters()));

            // Table comment lives in the parameters map.
            tableMetaInfo.setTableComment(table.getParameters().get("comment"));

            // Storage location on HDFS.
            tableMetaInfo.setTableFsPath(table.getSd().getLocation());

            // Input/output formats.
            tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
            tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());

            // Row serde.
            tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());

            // Creation time: metastore stores seconds, format as yyyy-MM-dd HH:mm:ss.
            String createTime = DateFormatUtils.format(new Date(table.getCreateTime() * 1000L), "yyyy-MM-dd HH:mm:ss");
            tableMetaInfo.setTableCreateTime(createTime);

            // MANAGED_TABLE / EXTERNAL_TABLE / ...
            tableMetaInfo.setTableType(table.getTableType());

            // Bucketing info is only present for bucketed tables.
            if (table.getSd().getBucketCols().size() > 0) {
                tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(table.getSd().getBucketCols()));
                tableMetaInfo.setTableBucketNum((long) table.getSd().getNumBuckets());
                tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols()));
            }
        } catch (TException e) {
            throw new RuntimeException("Failed to read Hive metadata for table " + dataBaseName + "." + tableName, e);
        }
        return tableMetaInfo;
    }

    /**
     * Opens a new Hive metastore client against {@link #hiveMetaServerUrl}.
     * The caller owns the returned client and must {@code close()} it.
     */
    private IMetaStoreClient getHiveClient() {
        HiveConf hiveConf = new HiveConf();
        MetastoreConf.setVar(hiveConf, MetastoreConf.ConfVars.THRIFT_URIS, hiveMetaServerUrl);
        try {
            return new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            throw new RuntimeException("Failed to connect to Hive metastore at " + hiveMetaServerUrl, e);
        }
    }

}
