package com.atguigu.dga230301.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SimplePropertyPreFilter;
import com.atguigu.dga230301.meta.bean.TableMetaInfo;
import com.atguigu.dga230301.meta.bean.TableMetaInfoForQuery;
import com.atguigu.dga230301.meta.bean.TableMetaInfoVO;
import com.atguigu.dga230301.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga230301.meta.service.TableMetaInfoExtraService;
import com.atguigu.dga230301.meta.service.TableMetaInfoService;
import com.atguigu.dga230301.meta.util.SqlUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * <p>
 * Table meta info service implementation.
 * </p>
 *
 * @author zhangjunyi
 * @since 2023-06-30
 */
@Service
@DS("dga")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {


    @Value("${hive.meta-server.url}")
    private String hiveMetaServerUrl;

    @Autowired
    TableMetaInfoExtraService tableMetaInfoExtraService;

    @Autowired
    TableMetaInfoMapper tableMetaInfoMapper;


    /**
     * Rebuilds the table meta info for one assessment date: deletes any rows already
     * written for that date (so re-runs are idempotent), extracts Hive/HDFS metadata
     * for every table in the schema, batch-saves the rows, then generates the
     * companion "extra" records.
     *
     * @param assessDate assessment date key; existing rows for it are removed first
     * @param schemaName Hive database (schema) whose tables are scanned
     */
    @Override
    public void initMetaInfo(String assessDate, String schemaName) {
        // 0. If the job re-runs for the same day, drop that day's rows first.
        remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate));

        // 1. Extract metadata from the Hive metastore and HDFS.
        List<TableMetaInfo> tableMetaInfoList = extractMetaInfo(assessDate, schemaName);

        // 2. Batch write (500 rows per flush) instead of one insert per row,
        //    to avoid wasting DB round trips.
        saveOrUpdateBatch(tableMetaInfoList, 500);

        // 3. Generate the auxiliary (extra) info rows keyed to the same tables.
        tableMetaInfoExtraService.genExtraListByMetaList(tableMetaInfoList);
    }

    /**
     * Returns one page of joined meta-info / extra-info rows for the latest
     * assessment date, optionally filtered by schema name, table name and DW level.
     *
     * @param tableMetaInfoForQuery paging (pageNo, pageSize) plus optional LIKE filters
     * @return the requested page of matching rows
     */
    @Override
    public List<TableMetaInfoVO> getTableMetaInfoList(TableMetaInfoForQuery tableMetaInfoForQuery) {
        // 1. Build the SQL. Overall shape:
        //    select <cols>
        //    from table_meta_info tm join table_meta_info_extra te
        //      on tm.table_name=te.table_name and tm.schema_name=te.schema_name
        //    where assess_date=(select max(assess_date) from table_meta_info)
        //    [and ... like '%...%']...
        //    limit (pageNo-1)*pageSize, pageSize
        StringBuilder sqlStrBuilder = new StringBuilder();
        sqlStrBuilder.append("select tm.id ,tm.table_name,tm.schema_name,table_comment,table_size,table_total_size,tec_owner_user_name,busi_owner_user_name, table_last_access_time,table_last_modify_time ");
        sqlStrBuilder.append("from table_meta_info tm ");
        sqlStrBuilder.append("join table_meta_info_extra te ");
        sqlStrBuilder.append("on tm.table_name=te.table_name and tm.schema_name=te.schema_name ");
        sqlStrBuilder.append("where assess_date=(select max(assess_date) from table_meta_info) ");

        // Optional filters (shared with the count query).
        appendQueryFilters(sqlStrBuilder, tableMetaInfoForQuery);

        // Offset of the first row of the requested page.
        int from = (tableMetaInfoForQuery.getPageNo() - 1) * tableMetaInfoForQuery.getPageSize();
        sqlStrBuilder.append("limit " + from + "," + tableMetaInfoForQuery.getPageSize());

        // 2. Hand the finished SQL to the mapper.
        return tableMetaInfoMapper.selectTableMetaInfoList(sqlStrBuilder.toString());
    }

    /**
     * Counts the rows matching the same filters as {@link #getTableMetaInfoList},
     * so callers can compute the total number of pages.
     *
     * @param tableMetaInfoForQuery optional LIKE filters (paging fields are ignored)
     * @return total number of matching rows
     */
    @Override
    public Integer getTableMetaInfoCount(TableMetaInfoForQuery tableMetaInfoForQuery) {
        // Same FROM/WHERE as the list query, but selecting count(*) and no LIMIT.
        StringBuilder sqlStrBuilder = new StringBuilder();
        sqlStrBuilder.append("select count(*) ");
        sqlStrBuilder.append("from table_meta_info tm ");
        sqlStrBuilder.append("join table_meta_info_extra te ");
        sqlStrBuilder.append("on tm.table_name=te.table_name and tm.schema_name=te.schema_name ");
        sqlStrBuilder.append("where assess_date=(select max(assess_date) from table_meta_info) ");

        appendQueryFilters(sqlStrBuilder, tableMetaInfoForQuery);

        return tableMetaInfoMapper.selectTableMetaInfoCount(sqlStrBuilder.toString());
    }

    /**
     * Appends the optional schema/table/DW-level LIKE filters shared by the list and
     * count queries (previously copy-pasted in both methods).
     * <p>
     * NOTE(review): values pass through {@link SqlUtil#filterUnsafeSql}, but this is
     * still string-built SQL; prefer bind parameters if the mapper supports them.
     */
    private void appendQueryFilters(StringBuilder sqlStrBuilder, TableMetaInfoForQuery query) {
        if (query.getSchemaName() != null) {
            sqlStrBuilder.append("and tm.schema_name like '%" + SqlUtil.filterUnsafeSql(query.getSchemaName()) + "%' ");
        }
        if (query.getTableName() != null) {
            sqlStrBuilder.append("and tm.table_name like '%" + SqlUtil.filterUnsafeSql(query.getTableName()) + "%' ");
        }
        if (query.getDwLevel() != null) {
            sqlStrBuilder.append("and te.dw_level like '%" + SqlUtil.filterUnsafeSql(query.getDwLevel()) + "%' ");
        }
    }

    /** Returns every meta-info row, unfiltered and unpaged. */
    @Override
    public List<TableMetaInfo> getAllTableMetaInfo() {
        return tableMetaInfoMapper.selectAllTableMetaInfo();
    }

    /**
     * Collects one {@link TableMetaInfo} per table in {@code schemaName}, populated
     * from the Hive metastore and from the table's HDFS directory.
     * <p>
     * Fix: one metastore client is opened for the whole run and closed in
     * {@code finally}; the previous code created a brand-new {@code HiveMetaStoreClient}
     * for every single table and never closed any of them, leaking thrift connections.
     */
    private List<TableMetaInfo> extractMetaInfo(String assessDate, String schemaName) {
        IMetaStoreClient hiveClient = getHiveClient();
        try {
            // 1. All table names of the schema.
            List<String> allTableNameList = hiveClient.getAllTables(schemaName);
            List<TableMetaInfo> metaInfoList = new ArrayList<>(allTableNameList.size());

            // 2. For each table, merge Hive metadata with HDFS file statistics.
            for (String tableName : allTableNameList) {
                // 3. Hive metastore fields.
                TableMetaInfo tableMetaInfo = getTableMetaInfoFromHive(hiveClient, tableName, schemaName);
                // 4. HDFS size/timestamp fields.
                getTableMetaInfoFromHdfs(tableMetaInfo);
                // 5./6. Assessment date and row creation time.
                tableMetaInfo.setAssessDate(assessDate);
                tableMetaInfo.setCreateTime(new Date());
                metaInfoList.add(tableMetaInfo);
            }
            return metaInfoList;
        } catch (TException e) {
            throw new RuntimeException("failed to extract meta info for schema " + schemaName, e);
        } finally {
            hiveClient.close();
        }
    }

    /**
     * Fills in the HDFS-derived fields of {@code tableMetaInfo}: raw and replicated
     * sizes, last access/modification time (recursively over the table directory),
     * and the file system's capacity figures.
     * <p>
     * Fix: the FileSystem handle is closed via try-with-resources; the previous code
     * left one open handle behind per distinct table owner.
     */
    private void getTableMetaInfoFromHdfs(TableMetaInfo tableMetaInfo) {
        // Open the FS as the table's HDFS owner so permission checks match that user.
        try (FileSystem fs = FileSystem.get(
                new URI(tableMetaInfo.getTableFsPath()), new Configuration(), tableMetaInfo.getTableFsOwner())) {

            // Direct children of the table directory (files and partition sub-dirs).
            FileStatus[] fileStatuses = fs.listStatus(new Path(tableMetaInfo.getTableFsPath()));

            // Sub-directories may nest arbitrarily deep, so accumulate recursively.
            addFileInfo(tableMetaInfo, fileStatuses, fs);

            // Cluster-wide file system capacity / usage / remaining space.
            tableMetaInfo.setFsCapcitySize(fs.getStatus().getCapacity());
            tableMetaInfo.setFsUsedSize(fs.getStatus().getUsed());
            tableMetaInfo.setFsRemainSize(fs.getStatus().getRemaining());

        } catch (Exception e) {
            throw new RuntimeException("failed to read HDFS info for " + tableMetaInfo.getTableFsPath(), e);
        }
    }

    /**
     * Recursively walks {@code fileStatuses}, adding every file's size into the table
     * totals and keeping the newest access/modification timestamps seen.
     * <p>
     * NOTE(review): assumes tableSize / tableTotalSize start non-null (e.g. 0) on a
     * fresh {@link TableMetaInfo} — confirm the bean's defaults.
     */
    private void addFileInfo(TableMetaInfo tableMetaInfo, FileStatus[] fileStatuses, FileSystem fs) {
        // 1. Walk everything (files and directories) under the current level.
        for (FileStatus fileStatus : fileStatuses) {
            // 2. Files contribute to the totals; directories are recursed into.
            if (fileStatus.isFile()) {
                // 3. Per-file facts.
                short replicationNum = fileStatus.getReplication();   // replica count
                long fileSize = fileStatus.getLen();                  // bytes, one replica
                long curAccessTime = fileStatus.getAccessTime();      // last access (ms)
                long curModifyTime = fileStatus.getModificationTime();// last modification (ms)

                // Raw data size of the table.
                tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + fileSize);

                // Size including replicas: replication can differ per file,
                // so multiply size by replica count file-by-file.
                tableMetaInfo.setTableTotalSize(tableMetaInfo.getTableTotalSize() + fileSize * replicationNum);

                // Keep the newest modification time: take this file's timestamp when
                // none is recorded yet, or when it is newer than the recorded one.
                if (tableMetaInfo.getTableLastModifyTime() == null
                        || tableMetaInfo.getTableLastModifyTime().getTime() < curModifyTime) {
                    tableMetaInfo.setTableLastModifyTime(new Date(curModifyTime));
                }

                // Keep the newest access time — same rule as above.
                if (tableMetaInfo.getTableLastAccessTime() == null
                        || tableMetaInfo.getTableLastAccessTime().getTime() < curAccessTime) {
                    tableMetaInfo.setTableLastAccessTime(new Date(curAccessTime));
                }
            } else { // a sub-directory (e.g. a partition)
                try {
                    // Recurse into the sub-directory's children.
                    FileStatus[] subFileStatus = fs.listStatus(fileStatus.getPath());
                    addFileInfo(tableMetaInfo, subFileStatus, fs);
                } catch (IOException e) {
                    throw new RuntimeException("failed to list " + fileStatus.getPath(), e);
                }
            }
        }
    }

    /**
     * Builds a {@link TableMetaInfo} from the Hive metastore's Table object for one
     * table of the schema.
     *
     * @param hiveClient shared metastore client, owned by the caller (not closed here)
     */
    private TableMetaInfo getTableMetaInfoFromHive(IMetaStoreClient hiveClient, String tableName, String schemaName) {
        try {
            // 1. Metadata object of the given table in the given database.
            Table table = hiveClient.getTable(schemaName, tableName);
            TableMetaInfo tableMetaInfo = new TableMetaInfo();

            // Schema and table name.
            tableMetaInfo.setTableName(tableName);
            tableMetaInfo.setSchemaName(schemaName);

            // Restrict JSON serialization to just the fields we need.
            SimplePropertyPreFilter simplePropertyPreFilter = new SimplePropertyPreFilter("name", "type", "comment");

            // Column definitions as JSON.
            tableMetaInfo.setColNameJson(JSON.toJSONString(table.getSd().getCols(), simplePropertyPreFilter));

            // Partition column definitions as JSON.
            tableMetaInfo.setPartitionColNameJson(JSON.toJSONString(table.getPartitionKeys(), simplePropertyPreFilter));

            // HDFS owner of the table.
            tableMetaInfo.setTableFsOwner(table.getOwner());

            // Table parameters (TBLPROPERTIES) as JSON.
            tableMetaInfo.setTableParametersJson(JSON.toJSONString(table.getParameters()));

            // Table comment, read directly from the parameters map (the old code
            // round-tripped the map through JSON just to read this one key).
            tableMetaInfo.setTableComment(table.getParameters().get("comment"));

            // Storage descriptor details: location, formats, serde.
            tableMetaInfo.setTableFsPath(table.getSd().getLocation());
            tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
            tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());
            tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());

            // The metastore stores createTime in seconds; widen to ms and format.
            String tableCreateTime = DateFormatUtils.format(new Date(table.getCreateTime() * 1000L), "yyyy-MM-dd HH:mm:ss");
            tableMetaInfo.setTableCreateTime(tableCreateTime);

            // MANAGED_TABLE / EXTERNAL_TABLE / ...
            tableMetaInfo.setTableType(table.getTableType());

            // Bucketing info only exists for bucketed tables.
            if (!table.getSd().getBucketCols().isEmpty()) {
                tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(table.getSd().getBucketCols()));
                tableMetaInfo.setTableBucketNum(table.getSd().getNumBuckets() + 0L);
                tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols()));
            }
            return tableMetaInfo;
        } catch (TException e) {
            throw new RuntimeException("failed to read hive metadata for " + schemaName + "." + tableName, e);
        }
    }

    /**
     * Opens a new Hive metastore client pointed at {@code hive.meta-server.url}.
     * The caller owns the returned client and must close it.
     */
    private IMetaStoreClient getHiveClient() {
        HiveConf hiveConf = new HiveConf();
        MetastoreConf.setVar(hiveConf, MetastoreConf.ConfVars.THRIFT_URIS, hiveMetaServerUrl);
        try {
            return new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            throw new RuntimeException("failed to connect to hive metastore at " + hiveMetaServerUrl, e);
        }
    }

}
