package com.atguigu.dga230315.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SimplePropertyPreFilter;
import com.atguigu.dga230315.meta.bean.TableMetaInfo;
import com.atguigu.dga230315.meta.bean.TableMetaInfoForQuery;
import com.atguigu.dga230315.meta.bean.TableMetaInfoVO;
import com.atguigu.dga230315.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga230315.meta.service.TableMetaInfoExtraService;
import com.atguigu.dga230315.meta.service.TableMetaInfoService;
import com.atguigu.dga230315.meta.utils.SqlUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

@Service
@DS("dga")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /** Thrift URI of the Hive metastore, e.g. {@code thrift://host:9083}. */
    @Value("${hive.meta-server.url}")
    String hiveMetaServerUrl;

    @Autowired
    TableMetaInfoExtraService tableMetaInfoExtraService;

    @Autowired
    TableMetaInfoMapper tableMetaInfoMapper;

    /**
     * Extracts Hive + HDFS metadata for every table of {@code schemaName} and persists one
     * snapshot row per table for the given assessment date.
     *
     * <p>Idempotent per date: re-running first deletes any rows already written for
     * {@code assessDate}, then re-extracts and re-saves.
     *
     * @param assessDate assessment date key (one metadata snapshot per table per date)
     * @param schemaName Hive database whose tables are assessed
     */
    @Override
    public void initMetaInfo(String assessDate, String schemaName) {
        // Re-run support: drop rows previously written for this assessment date.
        remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate));

        // 1. Pull Hive metastore + HDFS metadata for every table of the schema.
        List<TableMetaInfo> tableMetaInfoList = extractMetaInfo(assessDate, schemaName);

        // 2. Persist to MySQL in batches of 500.
        saveOrUpdateBatch(tableMetaInfoList, 500);

        // 3. Initialize the auxiliary ("extra") info rows for the new snapshot.
        tableMetaInfoExtraService.genExtraListByMetaList(tableMetaInfoList);
    }

    /**
     * Returns one page of the latest metadata snapshot, joined with the extra-info table
     * and filtered by the optional schema/table/dw-level LIKE conditions of the query bean.
     *
     * @param tableMetaInfoForQuery filter + pagination parameters (pageNo is 1-based)
     * @return the matching page of detail rows
     */
    @Override
    public List<TableMetaInfoVO> getTableMetaInfoList(TableMetaInfoForQuery tableMetaInfoForQuery) {
        StringBuilder sql = new StringBuilder();
        sql.append("select tm.id ,tm.table_name,tm.schema_name,table_comment,table_size,table_total_size,tec_owner_user_name,busi_owner_user_name, table_last_access_time,table_last_modify_time ");
        appendFromAndFilters(sql, tableMetaInfoForQuery);

        // pageNo is 1-based: offset = (pageNo - 1) * pageSize.
        int from = (tableMetaInfoForQuery.getPageNo() - 1) * tableMetaInfoForQuery.getPageSize();

        // BUGFIX: the leading space before "limit" is required. Previously the SQL ended up
        // as "...%'limit 0,20" (a syntax error) whenever any LIKE filter was appended.
        sql.append(" limit ").append(from).append(",").append(tableMetaInfoForQuery.getPageSize());

        return tableMetaInfoMapper.selectTableMetaInfoList(sql.toString());
    }

    /**
     * Returns the total row count matching the same filters as
     * {@link #getTableMetaInfoList(TableMetaInfoForQuery)} (no pagination).
     *
     * @param tableMetaInfoForQuery filter parameters; pagination fields are ignored
     * @return number of matching rows in the latest snapshot
     */
    @Override
    public Integer getTableMetaInfoCount(TableMetaInfoForQuery tableMetaInfoForQuery) {
        StringBuilder sql = new StringBuilder();
        sql.append("select count(*) ");
        appendFromAndFilters(sql, tableMetaInfoForQuery);
        return tableMetaInfoMapper.selectTableMetaInfoCount(sql.toString());
    }

    /**
     * Appends the FROM/JOIN/WHERE fragment shared by the list and count queries.
     * Restricts to the latest assessment date (table_meta_info keeps one snapshot per
     * table per day) and adds the optional LIKE filters.
     *
     * <p>NOTE(review): this is still string-built SQL; the values pass through
     * {@link SqlUtil#filterUnsafeSql(String)} first, but a parameterized mapper query
     * would be the safer long-term fix.
     */
    private void appendFromAndFilters(StringBuilder sql, TableMetaInfoForQuery query) {
        sql.append("  from table_meta_info tm\n" +
                "   join table_meta_info_extra te ");
        sql.append("on tm.table_name=te.table_name and tm.schema_name=te.schema_name ");
        // Latest snapshot only: pick the max assess_date across the whole table.
        sql.append(" where assess_date=(select max(assess_date) from table_meta_info) ");

        if (query.getSchemaName() != null && query.getSchemaName().length() > 0) {
            sql.append(" and tm.schema_name like '%").append(SqlUtil.filterUnsafeSql(query.getSchemaName())).append("%'");
        }
        if (query.getTableName() != null && query.getTableName().length() > 0) {
            sql.append(" and tm.table_name like '%").append(SqlUtil.filterUnsafeSql(query.getTableName())).append("%'");
        }
        if (query.getDwLevel() != null && query.getDwLevel().length() > 0) {
            sql.append(" and te.dw_level like '%").append(SqlUtil.filterUnsafeSql(query.getDwLevel())).append("%'");
        }
    }

    /**
     * Returns the latest metadata snapshot for every table, joined with its extra info.
     *
     * @return latest TableMetaInfo rows (each carrying its auxiliary info)
     */
    @Override
    public List<TableMetaInfo> getAllTableMetaInfoList() {
        return tableMetaInfoMapper.selectAllTableMetaInfoList();
    }

    /**
     * Builds one {@link TableMetaInfo} per table of the schema, combining Hive metastore
     * metadata with HDFS file statistics, stamped with the assessment date.
     *
     * <p>FIX: a single metastore client is now created, reused for every table, and closed
     * afterwards. Previously a new (never-closed) client was created per table.
     */
    private List<TableMetaInfo> extractMetaInfo(String assessDate, String schemaName) {
        List<TableMetaInfo> tableMetaInfoList;
        IMetaStoreClient hiveClient = getHiveClient();
        try {
            // 1. All table names of the schema to be assessed.
            List<String> allTablesName = hiveClient.getAllTables(schemaName);
            tableMetaInfoList = new ArrayList<>(allTablesName.size());

            for (String tableName : allTablesName) {
                // 2. Hive metastore metadata (no HDFS stats yet).
                TableMetaInfo tableMetaInfo = addMetaInfoFromHive(hiveClient, schemaName, tableName);

                // 3. HDFS file statistics (sizes, last access/modify times, fs capacity).
                addMetaInfoFromHDFS(tableMetaInfo);

                // 4. Snapshot bookkeeping.
                tableMetaInfo.setAssessDate(assessDate);
                tableMetaInfo.setCreateTime(new Date());

                tableMetaInfoList.add(tableMetaInfo);
            }
        } catch (Exception e) {
            throw new RuntimeException("failed to extract meta info for schema " + schemaName, e);
        } finally {
            // Was never closed before — release the thrift connection.
            hiveClient.close();
        }
        return tableMetaInfoList;
    }

    /**
     * Fills {@code tableMetaInfo} with HDFS statistics for its table location:
     * per-file sizes, last access/modify times, and the filesystem's overall
     * capacity / used / remaining bytes.
     *
     * <p>NOTE(review): the FileSystem instance obtained via
     * {@code FileSystem.get(uri, conf, user)} may be shared through Hadoop's internal
     * cache, so it is intentionally not closed here.
     */
    private void addMetaInfoFromHDFS(TableMetaInfo tableMetaInfo) throws URISyntaxException, IOException, InterruptedException {
        // Connect as the table's HDFS owner so permissions match the table location.
        FileSystem fs = FileSystem.get(new URI(tableMetaInfo.getTableFsPath()), new Configuration(), tableMetaInfo.getTableFsOwner());

        // All entries directly under the table location (files and partition dirs).
        FileStatus[] fileStatuses = fs.listStatus(new Path(tableMetaInfo.getTableFsPath()));

        // Recursively accumulate per-file sizes and access/modify times.
        addFileInfo(fs, fileStatuses, tableMetaInfo);

        // Filesystem-wide capacity / used / remaining, for utilization reporting.
        FsStatus fsStatus = fs.getStatus();
        tableMetaInfo.setFsCapcitySize(fsStatus.getCapacity());
        tableMetaInfo.setFsUsedSize(fsStatus.getUsed());
        tableMetaInfo.setFsRemainSize(fsStatus.getRemaining());
    }

    /**
     * Recursively walks {@code fileStatuses}, accumulating into {@code tableMetaInfo}:
     * total data size, total replicated size, and the most recent modify/access times.
     *
     * <p>NOTE(review): assumes tableSize/tableTotalSize start at a non-null value
     * (presumably 0) on the bean — confirm against TableMetaInfo's field defaults.
     */
    private void addFileInfo(FileSystem fs, FileStatus[] fileStatuses, TableMetaInfo tableMetaInfo) throws IOException {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                long fileSize = fileStatus.getLen();
                short replication = fileStatus.getReplication();
                long modificationTime = fileStatus.getModificationTime();
                long accessTime = fileStatus.getAccessTime();

                // Logical data size.
                tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + fileSize);

                // Physical size: multiply per file, since replication can differ per file.
                tableMetaInfo.setTableTotalSize(tableMetaInfo.getTableTotalSize() + fileSize * replication);

                // Keep the newest modification time seen so far.
                if (tableMetaInfo.getTableLastModifyTime() == null
                        || tableMetaInfo.getTableLastModifyTime().getTime() < modificationTime) {
                    tableMetaInfo.setTableLastModifyTime(new Date(modificationTime));
                }

                // Keep the newest access time seen so far.
                if (tableMetaInfo.getTableLastAccessTime() == null
                        || tableMetaInfo.getTableLastAccessTime().getTime() < accessTime) {
                    tableMetaInfo.setTableLastAccessTime(new Date(accessTime));
                }
            } else {
                // Directory (e.g. a partition): recurse into its children.
                FileStatus[] subFileStatus = fs.listStatus(fileStatus.getPath());
                addFileInfo(fs, subFileStatus, tableMetaInfo);
            }
        }
    }

    /**
     * Extracts Hive metastore metadata for one table into a new {@link TableMetaInfo}.
     * (Renamed from the original typo "addMeatInfoFromHive"; now takes the shared
     * metastore client instead of opening a new one per table.)
     *
     * @param hiveClient shared metastore client (owned and closed by the caller)
     * @param schemaName Hive database name
     * @param tableName  Hive table name
     * @return populated metadata bean (HDFS statistics not yet filled in)
     */
    private TableMetaInfo addMetaInfoFromHive(IMetaStoreClient hiveClient, String schemaName, String tableName) {
        TableMetaInfo tableMetaInfo;
        try {
            Table table = hiveClient.getTable(schemaName, tableName);

            tableMetaInfo = new TableMetaInfo();
            tableMetaInfo.setTableName(tableName);
            tableMetaInfo.setSchemaName(schemaName);

            // Restrict JSON serialization of column descriptors to these properties.
            SimplePropertyPreFilter simplePropertyPreFilter = new SimplePropertyPreFilter("name", "type", "comment");

            // Columns and partition columns, serialized as JSON.
            StorageDescriptor storageDescriptor = table.getSd();
            tableMetaInfo.setColNameJson(JSON.toJSONString(storageDescriptor.getCols(), simplePropertyPreFilter));
            tableMetaInfo.setPartitionColNameJson(JSON.toJSONString(table.getPartitionKeys(), simplePropertyPreFilter));

            // HDFS owner of the table location.
            tableMetaInfo.setTableFsOwner(table.getOwner());

            // Raw table parameters, plus the "comment" parameter as the table comment.
            Map<String, String> parameters = table.getParameters();
            tableMetaInfo.setTableParametersJson(JSON.toJSONString(parameters));
            tableMetaInfo.setTableComment(parameters.get("comment"));

            // Storage location and formats.
            tableMetaInfo.setTableFsPath(storageDescriptor.getLocation());
            tableMetaInfo.setTableInputFormat(storageDescriptor.getInputFormat());
            tableMetaInfo.setTableOutputFormat(storageDescriptor.getOutputFormat());
            tableMetaInfo.setTableRowFormatSerde(storageDescriptor.getSerdeInfo().getSerializationLib());

            // Metastore stores creation time as epoch SECONDS (int) — widen with 1000L
            // before converting, to avoid int overflow.
            Date createTime = new Date(table.getCreateTime() * 1000L);
            tableMetaInfo.setTableCreateTime(DateFormatUtils.format(createTime, "yyyy-MM-dd HH:mm:ss"));

            tableMetaInfo.setTableType(table.getTableType());

            // Bucketing info only applies when bucket columns exist.
            if (storageDescriptor.getBucketCols().size() > 0) {
                tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(storageDescriptor.getBucketCols()));
                tableMetaInfo.setTableBucketNum(storageDescriptor.getNumBuckets() * 1L);
                tableMetaInfo.setTableSortColsJson(JSON.toJSONString(storageDescriptor.getSortCols()));
            }
        } catch (TException e) {
            throw new RuntimeException("failed to read hive metadata for " + schemaName + "." + tableName, e);
        }
        return tableMetaInfo;
    }

    /**
     * Creates a new Hive metastore thrift client pointed at {@link #hiveMetaServerUrl}.
     * Caller is responsible for closing it.
     */
    private IMetaStoreClient getHiveClient() {
        HiveConf hiveConf = new HiveConf();
        MetastoreConf.setVar(hiveConf, MetastoreConf.ConfVars.THRIFT_URIS, hiveMetaServerUrl);
        try {
            return new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            throw new RuntimeException("failed to connect hive metastore at " + hiveMetaServerUrl, e);
        }
    }

}
