package com.atguigu.dga.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.support.spring.PropertyPreFilters;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.meta.bean.TableMetaInfoForQuery;
import com.atguigu.dga.meta.bean.TableMetaInfoVO;
import com.atguigu.dga.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga.meta.service.TableMetaInfoExtraService;
import com.atguigu.dga.meta.service.TableMetaInfoService;
import com.atguigu.dga.util.SqlUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * <p>
 * Service implementation for the metadata table (table_meta_info):
 * extracts Hive/HDFS metadata and serves the metadata-management queries.
 * </p>
 *
 * @author wuzhida
 * @since 2024-06-24
 */
@Service
@DS("dga")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /** Thrift client for the Hive metastore; created once in {@link #createHiveClient()}. */
    private IMetaStoreClient hiveClient;

    /** Hive metastore thrift URI(s), e.g. {@code thrift://host:9083}. */
    @Value("${hive.metastore.server.uris}")
    private String hiveMetaStoreServerUris;

    /** HDFS namenode URI, e.g. {@code hdfs://host:8020}. */
    @Value("${hdfs.uris}")
    private String hdfsUris;

    @Autowired
    private TableMetaInfoExtraService tableMetaInfoExtraService;

    @Autowired
    private TableMetaInfoMapper tableMetaInfoMapper;

    /**
     * Initializes the table_meta_info rows for one schema and one assessment date.
     *
     * @param schemaName the Hive database whose metadata is extracted
     * @param assessDate the assessment date the extracted rows are stored under
     * @throws Exception if the metastore, HDFS, or the database cannot be reached
     *                   <p>
     *                   Steps:
     *                   0. delete any rows already stored for this date (idempotent re-run)
     *                   1. extract Hive metadata
     *                   2. extract HDFS metadata
     *                   3. persist the combined metadata in one batch
     *                   4. initialize the auxiliary (extra) info with default values
     */
    @Override
    public void initTableMetaInfo(String schemaName, String assessDate) throws Exception {
        // 0. Delete rows previously stored for this schema/date so re-runs don't duplicate.
        remove(
                new QueryWrapper<TableMetaInfo>()
                        .eq("schema_name", schemaName)
                        .eq("assess_date", assessDate)
        );

        // 1. Extract Hive metadata.
        // 1.1 All table names in the schema.
        List<String> allTables = hiveClient.getAllTables(schemaName);
        List<TableMetaInfo> tableMetaInfos = new ArrayList<>(allTables.size());
        for (String tableName : allTables) {
            // 1.2 Fetch the Hive Table object.
            Table table = hiveClient.getTable(schemaName, tableName);

            // 1.3 Copy the Hive metadata into a TableMetaInfo bean.
            TableMetaInfo tableMetaInfo = extractTableMetaInfoFromHive(table);

            // 2. Enrich with HDFS metadata (sizes, last modify/access times, fs capacity).
            extractTableMetaInfoFromHDFS(tableMetaInfo);

            tableMetaInfo.setAssessDate(assessDate);
            tableMetaInfo.setCreateTime(new Date());

            // Accumulate so everything is written with a single saveBatch below.
            tableMetaInfos.add(tableMetaInfo);
        }
        // 3. Persist the combined metadata in one batch.
        saveBatch(tableMetaInfos);

        // 4. Initialize the auxiliary info with default values.
        tableMetaInfoExtraService.initTableMetaInfoExtra(tableMetaInfos);
    }

    /**
     * Metadata-management query: returns one page of tables matching the optional filters,
     * always against the most recent assessment date.
     *
     * @param tableMetaInfoForQuery filters: schemaName, tableName, dwLevel, pageSize, pageNo
     * @return the matching page of table metadata view objects
     */
    @Override
    public List<TableMetaInfoVO> getTableMetaInfoByConditionAndPage(TableMetaInfoForQuery tableMetaInfoForQuery) {
        // The mapper takes a raw SQL string, so the statement is assembled dynamically.
        StringBuilder sql = new StringBuilder(
                "SELECT \n" +
                        "\tti.id,\n" +
                        "\tti.table_name,\n" +
                        "\tti.schema_name,\n" +
                        "\tti.table_size,\n" +
                        "\tti.table_total_size,\n" +
                        "\tti.table_comment,\n" +
                        "\tte.busi_owner_user_name,\n" +
                        // BUGFIX: busi_owner_user_name was selected twice; the second column
                        // should be the technical owner, otherwise that VO field is never filled.
                        "\tte.tec_owner_user_name,\n" +
                        "\tti.table_last_modify_time,\n" +
                        "\tti.table_last_access_time\n" +
                        "FROM table_meta_info ti LEFT JOIN table_meta_info_extra te \n" +
                        "ON ti.schema_name = te.schema_name \n" +
                        "AND ti.table_name = te.table_name \n" +
                        "WHERE ti.assess_date = (SELECT MAX(assess_date) FROM table_meta_info) "
        );

        // Shared optional filters (schema / table / dwLevel).
        appendQueryConditions(sql, tableMetaInfoForQuery);

        // Paging. Default to page 1 / 10 rows when absent: the previous code unboxed the
        // Integers unconditionally and threw NPE on a request without paging parameters.
        Integer pageNo = tableMetaInfoForQuery.getPageNo();
        Integer pageSize = tableMetaInfoForQuery.getPageSize();
        int effectivePageNo = (pageNo == null || pageNo < 1) ? 1 : pageNo;
        int effectivePageSize = (pageSize == null || pageSize < 1) ? 10 : pageSize;
        int start = (effectivePageNo - 1) * effectivePageSize;

        sql.append("LIMIT ").append(start).append(", ").append(effectivePageSize);

        return baseMapper.selectTableMetaInfoVOList(sql.toString());
    }

    /**
     * Metadata-management query: returns the total number of tables matching the optional
     * filters, against the most recent assessment date (for paginator display).
     *
     * @param tableMetaInfoForQuery filters: schemaName, tableName, dwLevel (paging ignored)
     * @return the matching row count
     */
    @Override
    public Integer getTableMetaInfoCount(TableMetaInfoForQuery tableMetaInfoForQuery) {
        StringBuilder sql = new StringBuilder(
                "SELECT \n" +
                        "\tCOUNT(*) \n" +
                        "FROM table_meta_info ti LEFT JOIN table_meta_info_extra te \n" +
                        "ON ti.schema_name = te.schema_name \n" +
                        "AND ti.table_name = te.table_name \n" +
                        "WHERE ti.assess_date = (SELECT MAX(assess_date) FROM table_meta_info) "
        );

        // Same optional filters as the page query, kept in one place.
        appendQueryConditions(sql, tableMetaInfoForQuery);

        return baseMapper.selectTableMetaInfoCount(sql.toString());
    }

    /**
     * Appends the optional schema/table/dwLevel filters shared by the list and count queries.
     * Values pass through {@link SqlUtil#filterUnsafeSql} before being inlined; a
     * parameterized query would be preferable, but the mapper accepts only a raw SQL string.
     *
     * @param sql   the statement being assembled (must already contain a WHERE clause)
     * @param query the user-supplied filter values
     */
    private void appendQueryConditions(StringBuilder sql, TableMetaInfoForQuery query) {
        if (hasText(query.getSchemaName())) {
            sql.append("AND ti.schema_name = '")
                    .append(SqlUtil.filterUnsafeSql(query.getSchemaName().trim()))
                    .append("' \n");
        }
        if (hasText(query.getTableName())) {
            sql.append("AND ti.table_name LIKE '%")
                    .append(SqlUtil.filterUnsafeSql(query.getTableName().trim()))
                    .append("%' \n");
        }
        if (hasText(query.getDwLevel())) {
            sql.append("AND te.dw_level = '")
                    .append(SqlUtil.filterUnsafeSql(query.getDwLevel().trim()))
                    .append("' \n");
        }
    }

    /** @return true when the string is non-null and not blank after trimming */
    private static boolean hasText(String s) {
        return s != null && !s.trim().isEmpty();
    }

    /**
     * Extracts HDFS metadata for one table by recursively walking the table's storage path:
     * files contribute size/timestamps, directories are descended into.
     * The FileSystem instance is cached by Hadoop per (URI, user), so it is not closed here.
     *
     * @param tableMetaInfo the bean to enrich (must already carry fs owner and fs path)
     */
    private void extractTableMetaInfoFromHDFS(TableMetaInfo tableMetaInfo) throws URISyntaxException, IOException, InterruptedException {
        // 1. Obtain the FileSystem as the table's owner.
        Configuration conf = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI(hdfsUris), conf, tableMetaInfo.getTableFsOwner());
        // 2. List the table's root directory.
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path(tableMetaInfo.getTableFsPath()));
        // 3. Recursively aggregate file sizes and timestamps into the bean.
        addHdfsInfoToTableMetaInfo(fileSystem, fileStatuses, tableMetaInfo);
        // Cluster-wide capacity figures; fetch the status once instead of three RPC-backed calls.
        FsStatus fsStatus = fileSystem.getStatus();
        tableMetaInfo.setFsCapcitySize(fsStatus.getCapacity());
        tableMetaInfo.setFsUsedSize(fsStatus.getUsed());
        tableMetaInfo.setFsRemainSize(fsStatus.getRemaining());
    }

    /**
     * Recursive aggregation step: files add to the size totals and push the
     * last-modify/last-access high-water marks; directories recurse.
     *
     * @param fileSystem    the HDFS handle used for listing
     * @param fileStatuses  the entries of the current directory
     * @param tableMetaInfo the accumulator bean
     * @throws IOException if a directory listing fails
     */
    private void addHdfsInfoToTableMetaInfo(FileSystem fileSystem, FileStatus[] fileStatuses, TableMetaInfo tableMetaInfo) throws IOException {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                // File: accumulate logical size and replicated (total) size.
                tableMetaInfo.setTableSize((tableMetaInfo.getTableSize() == null ? 0L : tableMetaInfo.getTableSize()) + fileStatus.getLen());
                tableMetaInfo.setTableTotalSize((tableMetaInfo.getTableTotalSize() == null ? 0L : tableMetaInfo.getTableTotalSize()) + fileStatus.getLen() * fileStatus.getReplication());
                // BUGFIX: the running max must compare against the previous last-modify time,
                // not the bean's createTime (which made the max track the wrong field).
                long maxLastModifyTime = Math.max(tableMetaInfo.getTableLastModifyTime() == null ? 0L : tableMetaInfo.getTableLastModifyTime().getTime(), fileStatus.getModificationTime());
                tableMetaInfo.setTableLastModifyTime(new Date(maxLastModifyTime));
                long maxLastAccessTime = Math.max((tableMetaInfo.getTableLastAccessTime() == null ? 0L : tableMetaInfo.getTableLastAccessTime().getTime()), fileStatus.getAccessTime());
                tableMetaInfo.setTableLastAccessTime(new Date(maxLastAccessTime));
            } else {
                // Directory: descend one level and keep aggregating.
                FileStatus[] subFileStatuses = fileSystem.listStatus(fileStatus.getPath());
                addHdfsInfoToTableMetaInfo(fileSystem, subFileStatuses, tableMetaInfo);
            }
        }
    }

    /**
     * Copies the Hive {@link Table} metadata into a new {@link TableMetaInfo} bean.
     *
     * @param table the Hive metastore table object
     * @return the populated bean (HDFS fields still unset)
     */
    private TableMetaInfo extractTableMetaInfoFromHive(Table table) {
        TableMetaInfo tableMetaInfo = new TableMetaInfo();
        tableMetaInfo.setTableName(table.getTableName());
        tableMetaInfo.setSchemaName(table.getDbName());
        // Serialize columns keeping only "comment", "name", "type".
        PropertyPreFilters.MySimplePropertyPreFilter preFilter
                = new PropertyPreFilters().addFilter("comment", "name", "type");
        tableMetaInfo.setColNameJson(JSON.toJSONString(table.getSd().getCols(), preFilter));
        tableMetaInfo.setPartitionColNameJson(JSON.toJSONString(table.getPartitionKeys(), preFilter));
        tableMetaInfo.setTableFsOwner(table.getOwner());
        tableMetaInfo.setTableParametersJson(JSON.toJSONString(table.getParameters()));
        tableMetaInfo.setTableComment(table.getParameters().get("comment"));
        tableMetaInfo.setTableFsPath(table.getSd().getLocation());
        tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
        tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());
        tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());
        // Hive stores createTime in epoch seconds; Date expects milliseconds.
        tableMetaInfo.setTableCreateTime(new Date(table.getCreateTime() * 1000L));
        tableMetaInfo.setTableType(table.getTableType());
        // NOTE(review): this stores the number of bucket COLUMNS; the bucket count is usually
        // table.getSd().getNumBuckets() — confirm which the downstream assessment expects.
        tableMetaInfo.setTableBucketNum((long) table.getSd().getBucketColsSize());
        if (table.getSd().getBucketColsSize() != 0L) {
            tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(table.getSd().getBucketCols()));
            tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols()));
        }

        return tableMetaInfo;
    }

    /**
     * Creates the Hive metastore client once after the bean is constructed.
     * {@code @PostConstruct} runs exactly once, after dependency injection.
     *
     * @throws RuntimeException (wrapping the MetaException) if the connection fails,
     *                          so the application fails fast at startup
     */
    @PostConstruct
    public void createHiveClient() {
        Configuration conf = new Configuration();
        MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_URIS, hiveMetaStoreServerUris);
        try {
            hiveClient = new HiveMetaStoreClient(conf);
        } catch (MetaException e) {
            // Preserve the cause instead of printStackTrace + a bare message.
            throw new RuntimeException("Failed to create the Hive metastore client", e);
        }
    }

    /**
     * Releases the metastore connection when the Spring context shuts down.
     */
    @PreDestroy
    public void closeHiveClient() {
        if (hiveClient != null) {
            hiveClient.close();
        }
    }
}
