package com.atguigu.dga.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.PropertyPreFilter;
import com.alibaba.fastjson.support.spring.PropertyPreFilters;
import com.atguigu.dga.common.util.SqlUtil;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.meta.bean.TableMetaInfoQuery;
import com.atguigu.dga.meta.bean.TableMetaInfoVO;
import com.atguigu.dga.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga.meta.service.TableMetaInfoExtraService;
import com.atguigu.dga.meta.service.TableMetaInfoService;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;

/**
 * <p>
 * Metadata table service implementation. Pulls each table's definition from the
 * Hive metastore and its file statistics from HDFS, persists one snapshot per
 * assessment date into {@code table_meta_info}, and answers filtered/paged
 * queries over the latest snapshot of every table.
 * </p>
 *
 * @author zhangchen
 * @since 2024-11-19
 */
@Service
@Slf4j
@DS("dga")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    @Autowired
    TableMetaInfoExtraService tableMetaInfoExtraService;

    // Thrift client for the Hive metastore; created once in initHiveMetaStoreClient().
    HiveMetaStoreClient hiveMetaStoreClient;

    @Value("${hive.metastore.uri}")
    String hiveMetaStoreUrl;

    // Schema (Hive database) scanned when the caller does not name one.
    @Value("${governance.schema}")
    String defaultSchema;

    /**
     * Builds the Hive metastore client after property injection.
     * If the connection fails, {@code hiveMetaStoreClient} stays null and every
     * later extraction will fail with an NPE, so the root cause is logged with
     * its stack trace instead of being silently dropped (fix: the original
     * discarded the exception).
     */
    @PostConstruct
    public void initHiveMetaStoreClient() {
        HiveConf hiveConf = new HiveConf();
        hiveConf.set("hive.metastore.uris", hiveMetaStoreUrl);
        try {
            hiveMetaStoreClient = new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            log.warn("hive元数据服务连接失败 ..", e);
        }
    }

    /**
     * Extracts metadata for every table of the given schema and stores one
     * snapshot row per table for the given assessment date.
     *
     * @param schemaName Hive schema to scan; falls back to {@code governance.schema} when null/empty
     * @param assessDate snapshot date used as the idempotency key for the daily load
     * @throws Exception on metastore or HDFS access failure
     */
    public void extractMetaData(String schemaName, String assessDate) throws Exception {
        log.info("hiveMetaStoreUrl = {}", hiveMetaStoreUrl);
        // 1. Extract metadata from the Hive metastore service.
        if (schemaName == null || schemaName.isEmpty()) {
            schemaName = defaultSchema;
        }
        List<String> tableNameList = hiveMetaStoreClient.getAllTables(schemaName);
        List<TableMetaInfo> tableMetaInfoList = new ArrayList<>(tableNameList.size());
        for (String tableName : tableNameList) {
            TableMetaInfo tableMetaInfo = extractMetaFromHive(schemaName, tableName); // hive part
            addHdfsInfo(tableMetaInfo);                                               // hdfs part

            tableMetaInfo.setAssessDate(assessDate);
            tableMetaInfo.setCreateTime(new Date());

            tableMetaInfoList.add(tableMetaInfo);
            log.debug("tableMetaInfo = {}", tableMetaInfo);
        }

        log.debug("tableNameList = {}", tableNameList);

        // 2. Save to MySQL (table_meta_info).
        //    Idempotency: actively clear the current day's rows before re-inserting,
        //    so re-running the extraction for a date cannot duplicate rows.
        this.remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate));

        saveBatch(tableMetaInfoList);

        // 3. Initialize auxiliary info for tables that do not have any yet:
        //    rows present in table_meta_info but absent from table_meta_info_extra.
        //  select * from table_meta_info where assess_date = '...' and
        //  concat(schema_name ,'|', table_name) not in (select concat(schema_name ,'|', table_name) from table_meta_info_extra)
        QueryWrapper<TableMetaInfo> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("assess_date", assessDate);
        queryWrapper.notInSql("concat(schema_name ,'|', table_name)", "select concat(schema_name ,'|', table_name) from table_meta_info_extra");
        List<TableMetaInfo> tableMeteInfoWithoutExtraList = this.list(queryWrapper);

        // Initialize auxiliary info for the uninitialized tables.
        tableMetaInfoExtraService.initExtra(tableMeteInfoWithoutExtraList);
    }

    /**
     * Reads one table's definition from the Hive metastore and maps it onto a
     * {@link TableMetaInfo} bean: columns, partition keys, owner, parameters,
     * storage location/formats, serde, creation time, type, and bucketing.
     *
     * @param schemaName Hive schema the table belongs to
     * @param tableName  table to describe
     * @return a populated (but not yet persisted) TableMetaInfo
     * @throws TException when the metastore call fails
     */
    public TableMetaInfo extractMetaFromHive(String schemaName, String tableName) throws TException {
        Table table = hiveMetaStoreClient.getTable(schemaName, tableName);
        log.debug("table = {}", table);
        TableMetaInfo tableMetaInfo = new TableMetaInfo();
        tableMetaInfo.setTableName(tableName);
        tableMetaInfo.setSchemaName(schemaName);

        // Table columns: serialize only name/comment/type of each field schema.
        PropertyPreFilters.MySimplePropertyPreFilter filter = new PropertyPreFilters().addFilter("name", "comment", "type");
        String colNameJson = JSON.toJSONString(table.getSd().getCols(), filter);
        tableMetaInfo.setColNameJson(colNameJson);
        // Partition columns (same name/comment/type projection).
        String partitionColNameJson = JSON.toJSONString(table.getPartitionKeys(), filter);
        tableMetaInfo.setPartitionColNameJson(partitionColNameJson);
        // Table owner.
        tableMetaInfo.setTableFsOwner(table.getOwner());
        // Table parameters.
        tableMetaInfo.setTableParametersJson(JSON.toJSONString(table.getParameters()));
        // Table comment (may be absent from the parameter map, yielding null).
        tableMetaInfo.setTableComment(table.getParameters().get("comment"));
        // Table HDFS path.
        tableMetaInfo.setTableFsPath(table.getSd().getLocation());
        // Input/output formats.
        tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
        tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());
        // Row format (serde library).
        tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());
        // Creation time: the metastore stores epoch seconds; format it.
        // (Fix: the original first stored the raw epoch value as a string and
        // then immediately overwrote it — the dead store is removed.)
        Date date = new Date(table.getCreateTime() * 1000L);
        String createDateTime = DateFormatUtils.format(date, "yyyy-MM-dd HH:mm:ss");
        tableMetaInfo.setTableCreateTime(createDateTime);
        // Table type (e.g. MANAGED_TABLE / EXTERNAL_TABLE).
        tableMetaInfo.setTableType(table.getTableType());
        // Bucketing columns/count and sort columns.
        tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(table.getSd().getBucketCols()));
        tableMetaInfo.setTableBucketNum((long) table.getSd().getNumBuckets());
        tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols()));

        return tableMetaInfo;
    }

    /**
     * Walks the table's HDFS directory tree and accumulates file statistics
     * (logical size, replicated size, last access/modify time) plus
     * file-system-level capacity figures onto {@code tableMetaInfo}.
     *
     * Traversal plan:
     *  1. Preparation — a FileSystem (expansion tool), the first-level children
     *     (starting point), and tableMetaInfo itself as the result container.
     *  2. Traversal — recursive descent in {@link #getHdfsInfoRec}.
     *
     * @throws Exception on URI/HDFS access failure
     */
    public void addHdfsInfo(TableMetaInfo tableMetaInfo) throws Exception {
        // a. Traversal tool: FileSystem opened as the table's owner.
        //    NOTE(review): the FileSystem is deliberately not closed here —
        //    Hadoop caches instances per (scheme, authority, user) and closing
        //    a cached instance would break other users; confirm this matches
        //    the deployment's fs cache settings.
        FileSystem fileSystem = FileSystem.get(new URI(tableMetaInfo.getTableFsPath()), new Configuration(), tableMetaInfo.getTableFsOwner());
        // b. Traversal starting point: first-level children of the table path.
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path(tableMetaInfo.getTableFsPath()));
        // c. Result container: tableMetaInfo accumulates the sums and maxima.

        // 2. Traversal phase.
        getHdfsInfoRec(tableMetaInfo, fileStatuses, fileSystem);

        // 3. Environment info: total capacity / used / remaining of the FS.
        tableMetaInfo.setFsCapcitySize(fileSystem.getStatus().getCapacity());
        tableMetaInfo.setFsUsedSize(fileSystem.getStatus().getUsed());
        tableMetaInfo.setFsRemainSize(fileSystem.getStatus().getRemaining());

        log.debug("tableMetaInfo = {}", tableMetaInfo);
    }

    /**
     * Recursive directory walk over {@code fileStatuses}:
     *  - directory node: list its children and recurse into them;
     *  - file (leaf) node: add the file length and the replicated length, and
     *    keep the maximum access/modification timestamps seen so far.
     *
     * NOTE(review): the accumulation assumes tableSize/tableTotalSize start at
     * a non-null value in the TableMetaInfo bean — TODO confirm the bean's
     * field defaults.
     *
     * @throws IOException when listing a directory fails
     */
    public void getHdfsInfoRec(TableMetaInfo tableMetaInfo, FileStatus[] fileStatuses, FileSystem fileSystem) throws IOException {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isDirectory()) {
                // Intermediate node: expand children and recurse into each.
                FileStatus[] subFileStatus = fileSystem.listStatus(fileStatus.getPath());
                getHdfsInfoRec(tableMetaInfo, subFileStatus, fileSystem);
            } else {
                // Leaf node: accumulate logical size and replicated size.
                tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + fileStatus.getLen());
                tableMetaInfo.setTableTotalSize(tableMetaInfo.getTableTotalSize() + fileStatus.getLen() * fileStatus.getReplication());
                // Track the latest access time.
                if (tableMetaInfo.getTableLastAccessTime() == null
                        || tableMetaInfo.getTableLastAccessTime().getTime() < fileStatus.getAccessTime()) {
                    tableMetaInfo.setTableLastAccessTime(new Date(fileStatus.getAccessTime()));
                }
                // Track the latest modification time.
                if (tableMetaInfo.getTableLastModifyTime() == null
                        || tableMetaInfo.getTableLastModifyTime().getTime() < fileStatus.getModificationTime()) {
                    tableMetaInfo.setTableLastModifyTime(new Date(fileStatus.getModificationTime()));
                }
            }
        }
    }

    /**
     * Pages through table metadata joined with its auxiliary info, taking for
     * each table only its latest assessment snapshot (correlated max subquery).
     *
     * @param tableMetaInfoQuery optional schema/table/dw-level filters plus pageNo/pageSize
     * @return one VO per matching table, limited to the requested page
     */
    @Override
    public List<TableMetaInfoVO> getTableMetaForQuery(TableMetaInfoQuery tableMetaInfoQuery) {
        String sql =" select  tm.id ,tm.table_name,tm.schema_name,table_comment,table_size,table_total_size,tec_owner_user_name,busi_owner_user_name, table_last_access_time,table_last_modify_time" +
                "  from table_meta_info tm  inner join table_meta_info_extra te on te.schema_name=tm.schema_name\n" +
                "         and te.table_name=tm.table_name\n" +
                "          and tm.assess_date = (select max(assess_date) from table_meta_info tmi\n" +
                "                                 where tm.schema_name =tmi.schema_name and tm.table_name =tmi.table_name  )";
        // Filter conditions (shared with the count query so both stay in sync).
        sql += buildFilterConditions(tableMetaInfoQuery);

        // Pagination: translate (pageNo, pageSize) into a MySQL LIMIT clause.
        Integer pageNo = tableMetaInfoQuery.getPageNo();
        Integer pageSize = tableMetaInfoQuery.getPageSize();
        Integer startIndex = (pageNo - 1) * pageSize;

        sql += " limit " + startIndex + "," + pageSize;

        List<TableMetaInfoVO> tableMetaInfoVOList = this.baseMapper.getTableMetaInfoQuery(sql);

        return tableMetaInfoVOList;
    }

    /**
     * Counts the rows matching the same join and filters as
     * {@link #getTableMetaForQuery}, for pagination totals.
     */
    @Override
    public Integer getTableMetaTotalForQuery(TableMetaInfoQuery tableMetaInfoQuery) {
        String sql =" select  count(*)"  +
                "  from table_meta_info tm  inner join table_meta_info_extra te on te.schema_name=tm.schema_name\n" +
                "         and te.table_name=tm.table_name\n" +
                "          and tm.assess_date = (select max(assess_date) from table_meta_info tmi\n" +
                "                                 where tm.schema_name =tmi.schema_name and tm.table_name =tmi.table_name  )";
        // Filter conditions (shared with the list query so both stay in sync).
        sql += buildFilterConditions(tableMetaInfoQuery);

        Integer total = this.baseMapper.getTableMetaTotalQuery(sql);
        return total;
    }

    /**
     * Builds the shared "and ..." filter fragment used by both the list and
     * the count query (fix: this logic was duplicated in the two methods and
     * could silently drift apart). Produces exactly the same SQL text as the
     * original inline code.
     *
     * NOTE(review): inputs pass through SqlUtil.filterUnsafeSql, but
     * string-concatenated SQL remains injection-prone — prefer parameterized
     * queries in the mapper when feasible.
     */
    private String buildFilterConditions(TableMetaInfoQuery query) {
        StringBuilder conditions = new StringBuilder();
        if (query.getSchemaName() != null && query.getSchemaName().trim().length() > 0) {
            conditions.append(" and  tm.schema_name like '%").append(SqlUtil.filterUnsafeSql(query.getSchemaName())).append("%'");
        }
        if (query.getTableName() != null && query.getTableName().trim().length() > 0) {
            conditions.append(" and  tm.table_name like '%").append(SqlUtil.filterUnsafeSql(query.getTableName())).append("%'");
        }
        if (query.getDwLevel() != null && query.getDwLevel().trim().length() > 0) {
            conditions.append(" and  te.dw_level = '").append(SqlUtil.filterUnsafeSql(query.getDwLevel())).append("'");
        }
        return conditions.toString();
    }

    /**
     * Returns the metadata rows of the given assessment date joined with their
     * auxiliary info, via a mapper-defined query.
     */
    @Override
    public List<TableMetaInfo> getTableMetaInfoListWithExtra(String assessDate) {

        List<TableMetaInfo> tableMetaInfoList = baseMapper.selectTableMetaInfoListWithExtra(assessDate);
        return tableMetaInfoList;
    }
}
