package com.atguigu.dga.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.PropertyPreFilter;
import com.alibaba.fastjson.support.spring.PropertyPreFilters;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.meta.bean.TableMetaInfoQuery;
import com.atguigu.dga.meta.bean.TableMetaInfoVO;
import com.atguigu.dga.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga.meta.service.TableMetaInfoExtraService;
import com.atguigu.dga.meta.service.TableMetaInfoService;
import com.atguigu.dga.util.SqlUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * <p>
 * 元数据表 服务实现类
 * </p>
 *
 * @author liuxu
 * @since 2023-10-09
 */
@Service
@DS("dga")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /** Thrift URI of the Hive metastore, e.g. {@code thrift://host:9083}. */
    @Value("${hive.metastore.server.uri}")
    private String hiveMetaStoreUri;

    /** Namenode URI of the HDFS cluster, e.g. {@code hdfs://host:8020}. */
    @Value("${hdfs.uri}")
    private String hdfsUri;

    @Autowired
    TableMetaInfoExtraService tableMetaInfoExtraService;

    @Autowired
    TableMetaInfoMapper tableMetaInfoMapper;

    // Hive metastore client; created once at startup and reused by all requests.
    private IMetaStoreClient hiveClient;

    /**
     * Builds the Hive metastore client after dependency injection completes.
     * Fails fast if the metastore is unreachable, because every metadata
     * operation of this service depends on it.
     */
    @PostConstruct
    public void gethive() {
        HiveConf hiveConf = new HiveConf();
        MetastoreConf.setVar(hiveConf, MetastoreConf.ConfVars.THRIFT_URIS, hiveMetaStoreUri);
        try {
            hiveClient = new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            throw new RuntimeException("Cannot connect to Hive metastore at " + hiveMetaStoreUri, e);
        }
    }

    /** Releases the metastore connection when the application shuts down. */
    @PreDestroy
    public void closeHiveClient() {
        if (hiveClient != null) {
            hiveClient.close();
        }
    }

    /**
     * Initializes the table metadata for one Hive database and one assessment
     * date: clears rows already stored for that (schema, date) pair so the run
     * is idempotent, pulls every table's metadata from the Hive metastore and
     * from HDFS, persists the batch, then initializes the auxiliary-info table.
     *
     * @param schemaName Hive database whose tables are assessed
     * @param assessDate assessment date key stored with each row
     * @throws Exception on metastore, HDFS or persistence failures
     */
    @Override
    @DS("dga")
    public void initTableMetaInfo(String schemaName, String assessDate) throws Exception {
        // Remove any previous rows for this schema/date before re-loading.
        remove(new QueryWrapper<TableMetaInfo>()
                .eq("schema_name", schemaName)
                .eq("assess_date", assessDate));

        // All table names of the database under assessment.
        List<String> allTables = hiveClient.getAllTables(schemaName);

        List<TableMetaInfo> tableMetaInfoList = new ArrayList<>(allTables.size());
        for (String tableName : allTables) {
            Table table = hiveClient.getTable(schemaName, tableName);

            // Hive-side metadata (columns, location, formats, ...).
            TableMetaInfo tableMetaInfo = extractTableMetaInfoFromHive(table);
            // HDFS-side metadata (sizes, last modify/access times).
            extractTableMetaInfoFromHdfs(tableMetaInfo);

            tableMetaInfo.setAssessDate(assessDate);
            tableMetaInfo.setCreateTime(new Date());
            tableMetaInfoList.add(tableMetaInfo);
        }

        // Persist in one batch.
        saveOrUpdateBatch(tableMetaInfoList);

        // Initialize the auxiliary-information table for the same tables.
        tableMetaInfoExtraService.initTableMetaInfoExtra(tableMetaInfoList);
    }

    /**
     * Fills the HDFS-related fields of the given bean: the table's logical and
     * replicated sizes, the most recent modify/access timestamps, and the file
     * system's capacity/used/remaining totals.
     */
    private void extractTableMetaInfoFromHdfs(TableMetaInfo tableMetaInfo) throws Exception {
        // FileSystem.get caches instances per (uri, user); the instance is
        // deliberately not closed here — closing would invalidate the cached
        // instance for other callers running as the same user.
        FileSystem fs = FileSystem.get(new URI(hdfsUri), new Configuration(), tableMetaInfo.getTableFsOwner());

        // All entries directly under the table's HDFS location.
        FileStatus[] fileStatuses = fs.listStatus(new Path(tableMetaInfo.getTableFsPath()));
        recursionExtractHdfsMetaInfo(fileStatuses, tableMetaInfo, fs);

        // Fetch the FS status once and reuse it (the original issued three
        // separate getStatus() calls for the same values).
        FsStatus fsStatus = fs.getStatus();
        tableMetaInfo.setFsCapcitySize(fsStatus.getCapacity());
        tableMetaInfo.setFsUsedSize(fsStatus.getUsed());
        tableMetaInfo.setFsRemainSize(fsStatus.getRemaining());
    }

    /**
     * Recursively walks the file tree under the table location, accumulating
     * file sizes and keeping the newest modify/access timestamps in the bean.
     * NOTE(review): assumes tableSize/tableTotalSize start non-null (e.g. 0)
     * in TableMetaInfo — TODO confirm against the bean's defaults.
     */
    private void recursionExtractHdfsMetaInfo(FileStatus[] fileStatuses, TableMetaInfo tableMetaInfo, FileSystem fs) throws IOException {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                // Logical size, and size including HDFS replication.
                tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + fileStatus.getLen());
                tableMetaInfo.setTableTotalSize(tableMetaInfo.getTableTotalSize() + fileStatus.getLen() * fileStatus.getReplication());

                // Keep the newest timestamps seen so far across all files.
                long lastModifyTimeMs = Math.max(
                        tableMetaInfo.getTableLastModifyTime() == null ? 0 : tableMetaInfo.getTableLastModifyTime().getTime(),
                        fileStatus.getModificationTime());
                tableMetaInfo.setTableLastModifyTime(new Date(lastModifyTimeMs));

                long lastAccessTimeMs = Math.max(
                        tableMetaInfo.getTableLastAccessTime() == null ? 0 : tableMetaInfo.getTableLastAccessTime().getTime(),
                        fileStatus.getAccessTime());
                tableMetaInfo.setTableLastAccessTime(new Date(lastAccessTimeMs));
            } else {
                // Directory: descend into its children.
                recursionExtractHdfsMetaInfo(fs.listStatus(fileStatus.getPath()), tableMetaInfo, fs);
            }
        }
    }

    /**
     * Copies the Hive-side metadata of the given metastore {@link Table} into
     * a fresh {@link TableMetaInfo} bean.
     */
    private TableMetaInfo extractTableMetaInfoFromHive(Table table) {
        TableMetaInfo tableMetaInfo = new TableMetaInfo();
        tableMetaInfo.setTableName(table.getTableName());
        tableMetaInfo.setSchemaName(table.getDbName());

        // When serializing column descriptors, keep only these properties.
        PropertyPreFilters.MySimplePropertyPreFilter fieldFilter =
                new PropertyPreFilters().addFilter("comment", "name", "type");
        tableMetaInfo.setColNameJson(JSON.toJSONString(table.getSd().getCols(), fieldFilter));
        // Partition columns
        tableMetaInfo.setPartitionColNameJson(JSON.toJSONString(table.getPartitionKeys(), fieldFilter));
        // HDFS owner of the table
        tableMetaInfo.setTableFsOwner(table.getOwner());
        // Raw table parameters
        tableMetaInfo.setTableParametersJson(JSON.toJSONString(table.getParameters()));
        // Table description (stored as the "comment" parameter)
        tableMetaInfo.setTableComment(table.getParameters().get("comment"));
        // HDFS location
        tableMetaInfo.setTableFsPath(table.getSd().getLocation());
        // Input/output formats and serde
        tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
        tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());
        tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());
        // getCreateTime() is epoch SECONDS as an int; multiply as a long
        // (1000L) — "int * 1000" overflows for any real-world date.
        tableMetaInfo.setTableCreateTime(new Date(table.getCreateTime() * 1000L));
        // Table type string reported by the metastore
        tableMetaInfo.setTableType(table.getTableType());
        // Bucketing info; a non-positive bucket count means "not bucketed".
        tableMetaInfo.setTableBucketNum(table.getSd().getNumBuckets() + 0L);
        if (tableMetaInfo.getTableBucketNum() > 0) {
            tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(table.getSd().getBucketCols()));
            tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols()));
        }
        return tableMetaInfo;
    }

    // Shared body of the list/count queries: meta info joined with its extra
    // info, restricted to the most recent assessment date.
    private static final String BASE_FROM_WHERE =
            "FROM table_meta_info ti  JOIN table_meta_info_extra te \n" +
            "ON ti.table_name = te.table_name AND ti.schema_name = te.schema_name \n" +
            "WHERE ti.assess_date = (SELECT MAX(assess_date) FROM table_meta_info) \n";

    /**
     * Appends the optional filter conditions carried by the query object
     * (fuzzy table name, exact schema name, exact DW level). Every fragment
     * ends with whitespace + newline so subsequent fragments (and the LIMIT
     * clause) never glue onto a quoted literal — the original produced e.g.
     * {@code ...='ods'limit 0,20}.
     * NOTE(review): values are sanitized via SqlUtil.filterUnsafeSql but are
     * still concatenated into SQL; parameterized mapper queries would be safer.
     */
    private void appendFilters(StringBuilder sql, TableMetaInfoQuery query) {
        if (query.getTableName() != null && !query.getTableName().trim().isEmpty()) {
            sql.append("and ti.table_name like '%")
                    .append(SqlUtil.filterUnsafeSql(query.getTableName()))
                    .append("%' \n");
        }
        if (query.getSchemaName() != null && !query.getSchemaName().trim().isEmpty()) {
            sql.append("and  ti.schema_name='")
                    .append(SqlUtil.filterUnsafeSql(query.getSchemaName()))
                    .append("' \n");
        }
        if (query.getDwLevel() != null && !query.getDwLevel().trim().isEmpty()) {
            sql.append("and te.dw_level ='")
                    .append(SqlUtil.filterUnsafeSql(query.getDwLevel()))
                    .append("' \n");
        }
    }

    /**
     * Returns one page of table metadata (joined with its extra info) for the
     * latest assessment date, optionally filtered by table name, schema name
     * and DW level.
     */
    @Override
    public List<TableMetaInfoVO> gettablelist(TableMetaInfoQuery tableMetaInfoQuery) {
        StringBuilder sql = new StringBuilder(
                "SELECT \n" +
                "    ti.id , ti.table_name , ti.schema_name , ti.table_comment ,\n" +
                "    ti.table_size , ti.table_total_size , ti.table_last_modify_time ,\n" +
                "    ti.table_last_access_time ,te.tec_owner_user_name , te.busi_owner_user_name \n" +
                BASE_FROM_WHERE);
        appendFilters(sql, tableMetaInfoQuery);

        // Page offset; pageNo is 1-based.
        int start = (tableMetaInfoQuery.getPageNo() - 1) * tableMetaInfoQuery.getPageSize();
        sql.append("limit ").append(start).append(",").append(tableMetaInfoQuery.getPageSize());

        return tableMetaInfoMapper.selectTableMetaInfoVoList(sql.toString());
    }

    /**
     * Returns the total number of rows matching the same filters as
     * {@link #gettablelist}, for pagination.
     */
    @Override
    public Integer gettablecount(TableMetaInfoQuery tableMetaInfoQuery) {
        StringBuilder sql = new StringBuilder(
                "SELECT \n" +
                "    count(*) cnt\n" +
                BASE_FROM_WHERE);
        appendFilters(sql, tableMetaInfoQuery);

        return tableMetaInfoMapper.selectTableMetaInfoVocount(sql.toString());
    }
}
