package com.atguigu.dga.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SimplePropertyPreFilter;
import com.atguigu.dga.meta.bean.PageTableMetaInfo;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga.meta.service.TableMetaInfoService;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;

/**
 * <p>
 * 元数据表 服务实现类
 * </p>
 *
 * @author atguigu
 * @since 2023-06-05
 */
@Service
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /*
     * BUG FIX: @Autowired was commented out, leaving `client` null and causing a
     * NullPointerException the first time extractMetaInfoFromHive() is called.
     * Restored injection. NOTE(review): confirm a HiveMetaStoreClient bean is
     * actually declared in the Spring context (e.g. via a @Bean method).
     */
    @Autowired
    private HiveMetaStoreClient client;

    // HDFS NameNode URI, e.g. hdfs://host:8020 (from application config)
    @Value("${hdfs.uri}")
    private String hdfsUri;

    // User with read access to every warehouse table directory (an HDFS superuser)
    @Value("${hdfs.admin}")
    private String hdfsAdmin;

    /**
     * Rebuilds the metadata snapshot for one database on one assessment date:
     * deletes any rows already generated for that db/date (so re-runs are
     * idempotent), extracts table descriptions from the Hive metastore,
     * enriches them with HDFS directory statistics, then batch-inserts.
     *
     * @param db         Hive database (schema) name
     * @param assessDate assessment date the snapshot belongs to
     * @throws Exception on metastore, HDFS, or persistence failure
     */
    @Override
    public void initMetaInfo(String db, String assessDate) throws Exception {
        // Remove rows already produced for this db/date so the job can be re-run safely.
        remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate).eq("schema_name", db));
        // Extract table descriptions from the Hive metastore.
        List<TableMetaInfo> tableMetaInfos = extractMetaInfoFromHive(db, assessDate);
        // Enrich each entry with size/time statistics from its HDFS directory.
        extractMetaInfoFromHdfs(tableMetaInfos);
        // Persist the assembled metadata in one batch.
        saveBatch(tableMetaInfos);
    }

    /**
     * Extracts metadata for every table of the given database from the Hive
     * metastore and maps it into {@link TableMetaInfo} beans stamped with the
     * assessment date and creation timestamp.
     *
     * @param db         Hive database name
     * @param assessDate assessment date to stamp on each bean
     * @return one TableMetaInfo per table in the database
     * @throws Exception on metastore communication failure
     */
    public List<TableMetaInfo> extractMetaInfoFromHive(String db, String assessDate) throws Exception {
        List<TableMetaInfo> result = new ArrayList<>();
        // All table names in the database.
        List<String> allTables = client.getAllTables(db);

        for (String table : allTables) {
            Table tableMeta = client.getTable(db, table);
            // Map the metastore Table into our bean.
            TableMetaInfo tableMetaInfo = createTableMetaInfo(tableMeta);
            tableMetaInfo.setAssessDate(assessDate);
            tableMetaInfo.setCreateTime(new Timestamp(System.currentTimeMillis()));
            result.add(tableMetaInfo);
        }
        return result;
    }

    /**
     * Copies the relevant fields of a Hive metastore {@link Table} into a
     * {@link TableMetaInfo} bean. Column lists are serialized as JSON keeping
     * only the comment/name/type properties of each field schema.
     */
    private TableMetaInfo createTableMetaInfo(Table tableMeta) {
        TableMetaInfo tableMetaInfo = new TableMetaInfo();
        tableMetaInfo.setTableName(tableMeta.getTableName());
        tableMetaInfo.setSchemaName(tableMeta.getDbName());
        // Most physical-layout information lives in the storage descriptor.
        StorageDescriptor sd = tableMeta.getSd();
        // Serialize only the comment/name/type properties of each column schema.
        SimplePropertyPreFilter fieldFilter = new SimplePropertyPreFilter("comment", "name", "type");
        tableMetaInfo.setColNameJson(JSON.toJSONString(sd.getCols(), fieldFilter));
        tableMetaInfo.setPartitionColNameJson(JSON.toJSONString(tableMeta.getPartitionKeys(), fieldFilter));
        tableMetaInfo.setTableFsOwner(tableMeta.getOwner());
        tableMetaInfo.setTableParametersJson(JSON.toJSONString(tableMeta.getParameters()));
        // Hive stores the table comment inside the parameters map; may be null.
        tableMetaInfo.setTableComment(tableMeta.getParameters().get("comment"));
        tableMetaInfo.setTableFsPath(sd.getLocation());
        tableMetaInfo.setTableInputFormat(sd.getInputFormat());
        tableMetaInfo.setTableOutputFormat(sd.getOutputFormat());
        tableMetaInfo.setTableRowFormatSerde(sd.getSerdeInfo().getSerializationLib());
        // getCreateTime() is epoch seconds (int); stored here as its decimal string.
        // NOTE(review): consider formatting as a readable date if downstream allows.
        tableMetaInfo.setTableCreateTime(tableMeta.getCreateTime() + "");
        // BUG FIX: original read tableMetaInfo.getTableType() (self-assignment of a
        // still-null field); the value must come from the metastore Table.
        tableMetaInfo.setTableType(tableMeta.getTableType());
        tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(sd.getBucketCols()));
        // Explicit widening cast instead of the error-prone lowercase "+ 0l" suffix.
        tableMetaInfo.setTableBucketNum((long) sd.getNumBuckets());
        tableMetaInfo.setTableSortColsJson(JSON.toJSONString(sd.getSortCols()));

        return tableMetaInfo;
    }

    /**
     * Walks each table's HDFS directory and fills in storage statistics
     * (table size, replicated size, last modify/access time) plus overall
     * filesystem capacity/used/remaining figures.
     * <p>
     * The client connects as the configured HDFS admin user, which must have
     * read permission on every warehouse directory.
     */
    private void extractMetaInfoFromHdfs(List<TableMetaInfo> tableMetaInfos) throws Exception {
        // FIX: close the FileSystem when done (resource leak in the original).
        // NOTE(review): FileSystem.get() may return a cached, shared instance;
        // closing is safe for this batch job but verify no concurrent users.
        try (FileSystem hdfs = FileSystem.get(new URI(hdfsUri), new Configuration(), hdfsAdmin)) {
            for (TableMetaInfo tableMetaInfo : tableMetaInfos) {
                // Root directory of this table on HDFS (taken from the metastore).
                String tableFsPath = tableMetaInfo.getTableFsPath();
                FileStatus[] fileStatuses = hdfs.listStatus(new Path(tableFsPath));
                // Recursively accumulate file sizes and timestamps for the table.
                statsTableSize(fileStatuses, tableMetaInfo, hdfs);
                // Cluster-wide filesystem figures (same for every table in this run).
                FsStatus status = hdfs.getStatus();
                tableMetaInfo.setFsCapcitySize(status.getCapacity());
                tableMetaInfo.setFsUsedSize(status.getUsed());
                tableMetaInfo.setFsRemainSize(status.getRemaining());
            }
        }
    }

    /**
     * Recursively walks a table directory, summing file sizes into the bean and
     * keeping the maximum modification/access time seen across all files.
     * Assumes the bean's size and time fields are initialized to non-null
     * defaults so the accumulating arithmetic below never hits null.
     */
    private void statsTableSize(FileStatus[] fileStatuses, TableMetaInfo tableMetaInfo, FileSystem hdfs) throws IOException {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                // Logical size: sum of file lengths.
                tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + fileStatus.getLen());
                // Physical size: length times replication factor.
                tableMetaInfo.setTableTotalSize(tableMetaInfo.getTableTotalSize() + fileStatus.getLen() * fileStatus.getReplication());
                // Table's last-modify time = max modification time over all files.
                // (Original comment wrongly said "access time" here.)
                tableMetaInfo.getTableLastModifyTime().setTime(
                        Math.max(tableMetaInfo.getTableLastModifyTime().getTime(), fileStatus.getModificationTime())
                );
                // Table's last-access time = max access time over all files.
                tableMetaInfo.getTableLastAccessTime().setTime(
                        Math.max(tableMetaInfo.getTableLastAccessTime().getTime(), fileStatus.getAccessTime())
                );
            } else {
                // Subdirectory: list it and recurse.
                FileStatus[] subFileStatuses = hdfs.listStatus(fileStatus.getPath());
                statsTableSize(subFileStatuses, tableMetaInfo, hdfs);
            }
        }
    }

    /**
     * Pages through table metadata filtered by optional table name, schema name,
     * and warehouse level. Filtering is done with dynamic SQL in the mapper
     * (safe, parameterized) rather than the string-built WHERE clause below.
     *
     * @param from     zero-based row offset
     * @param pageSize page size
     * @return one page of matching rows
     */
    @Override
    public List<PageTableMetaInfo> queryTableMetaInfoList(Integer from, Integer pageSize,
                                                          String tableName, String schemaName, String dwLevel) {
        return baseMapper.queryTableMetaInfoList(from, pageSize, schemaName, tableName, dwLevel);
    }

    /**
     * Counts rows matching the same optional filters as
     * {@link #queryTableMetaInfoList}; used for pagination totals.
     */
    @Override
    public int statsTotalNum(String tableName, String schemaName, String dwLevel) {
        return baseMapper.statsTotalNum(schemaName, tableName, dwLevel);
    }

    /**
     * Builds a raw WHERE clause from the optional filters.
     *
     * @deprecated Unused and vulnerable to SQL injection (values are concatenated,
     * not bound). Kept only for reference; the mapper's dynamic SQL replaces it.
     * Do NOT wire this into any query.
     */
    @Deprecated
    private String generateWhereSql(String tableName, String schemaName, String dwLevel) {
        StringBuilder whereSb = new StringBuilder(" where ");

        // isNotBlank(str): true when str is non-null, non-empty, and not whitespace-only.
        if (StringUtils.isNotBlank(schemaName)) {
            whereSb.append(" schema_name = '" + schemaName).append("' and ");
        }
        if (StringUtils.isNotBlank(tableName)) {
            whereSb.append(" table_name = '" + tableName).append("' and ");
        }
        if (StringUtils.isNotBlank(dwLevel)) {
            whereSb.append(" dw_level = '" + dwLevel).append("' and ");
        }
        // Trailing tautology closes the dangling "and".
        whereSb.append(" 1 = 1");

        return whereSb.toString();
    }

    /** Returns every table metadata row, unfiltered. */
    @Override
    public List<TableMetaInfo> queryAllTableMeta() {
        return baseMapper.queryAllTableMeta();
    }

}
