package com.atguigu.dga.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SimplePropertyPreFilter;
import com.atguigu.dga.meta.bean.TableMetaForQuery;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.meta.bean.TableMetaInfoExtra;
import com.atguigu.dga.meta.bean.TableMetaInfoVO;
import com.atguigu.dga.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga.meta.service.TableMetaInfoExtraService;
import com.atguigu.dga.meta.service.TableMetaInfoService;
import com.atguigu.dga.util.SqlUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * <p>
 * 元数据表 服务实现类
 * </p>
 *
 * @author zhangchen
 * @since 2023-11-18
 */
@Service
@DS("dga")
@Service
@DS("dga")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /** Hive metastore thrift URI (property {@code metastore.url}); injected before {@link #initClient()} runs. */
    @Value("${metastore.url}")
    private String metaUrl;

    /** Shared Hive metastore client, created once by {@link #initClient()}. */
    private IMetaStoreClient iMetaStoreClient;

    @Autowired
    private TableMetaInfoExtraService tableMetaInfoExtraService;

    /**
     * Creates the Hive metastore client once property injection has completed
     * (hence {@code @PostConstruct}: {@code metaUrl} must already be set).
     *
     * @throws RuntimeException if the client cannot be created; the original
     *         exception is kept as the cause so startup failures are diagnosable
     */
    @PostConstruct
    public void initClient() {
        try {
            HiveConf hiveConf = new HiveConf();
            hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaUrl);
            iMetaStoreClient = new HiveMetaStoreClient(hiveConf);
        } catch (Exception e) {
            // Fix: preserve the cause instead of discarding it.
            throw new RuntimeException("初始化hive客户端异常", e);
        }
    }

    /**
     * Collects metadata for every table of {@code dbName} for one assessment date:
     * schema-level info from the Hive metastore, storage-level info from HDFS and
     * bookkeeping timestamps, persists the batch, then seeds the extra-info table
     * for any table that does not yet have an extra row.
     *
     * <p>Idempotent per assessment date: existing rows for {@code assessDate} are
     * removed before re-collection.
     *
     * @param assessDate assessment date string (also the idempotency key)
     * @param dbName     Hive database (schema) name to scan
     * @throws Exception if the metastore cannot be queried
     */
    public void initTableMeta(String assessDate, String dbName) throws Exception {
        // Idempotency: drop rows already written for this assessment date.
        remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate));

        List<String> tableList = iMetaStoreClient.getAllTables(dbName);

        // Pre-size: exactly one entry per table, avoids ArrayList growth.
        List<TableMetaInfo> tableMetaInfoList = new ArrayList<>(tableList.size());

        for (String tableName : tableList) {
            TableMetaInfo tableMetaInfo = new TableMetaInfo();
            tableMetaInfo.setTableName(tableName);
            tableMetaInfo.setSchemaName(dbName);

            // 1. schema-level metadata from the Hive metastore
            extractMetaFromHive(tableMetaInfo);

            // 2. storage-level metadata from HDFS; deliberately best-effort so a
            //    single unreadable table path does not abort the whole scan
            try {
                extractMetaFromHdfs(tableMetaInfo);
            } catch (Exception e) {
                // TODO(review): route through a logger instead of stderr
                e.printStackTrace();
            }

            // 3. bookkeeping timestamps
            tableMetaInfo.setAssessDate(assessDate);
            tableMetaInfo.setCreateTime(new Date());

            tableMetaInfoList.add(tableMetaInfo);
        }
        saveOrUpdateBatch(tableMetaInfoList);

        // Seed table_meta_info_extra for tables that have no extra row yet.
        QueryWrapper<TableMetaInfo> queryWrapper = new QueryWrapper<TableMetaInfo>()
                .eq("assess_date", assessDate)
                .notInSql("concat(table_name,',',schema_name)",
                        "select concat(table_name,',',schema_name)  from table_meta_info_extra");

        List<TableMetaInfo> tableMetaInfoNonExistsExtraList = list(queryWrapper);

        tableMetaInfoExtraService.initTableMetaInfoExtra(tableMetaInfoNonExistsExtraList);
    }

    /**
     * Fills the HDFS-derived fields of {@code tableMetaInfo}: accumulated file
     * size, replication-adjusted size, last modify/access times, plus a snapshot
     * of the filesystem's capacity/remaining/used figures.
     *
     * @param tableMetaInfo supplies the table path and owner, receives the results
     * @throws Exception on URI parse or HDFS access failure
     */
    private void extractMetaFromHdfs(TableMetaInfo tableMetaInfo) throws Exception {
        // Fix: try-with-resources guarantees the FileSystem handle is released
        // even when listStatus or the recursion throws (the original only closed
        // it on the success path).
        try (FileSystem fileSystem = FileSystem.get(
                new URI(tableMetaInfo.getTableFsPath()),
                new Configuration(),
                tableMetaInfo.getTableFsOwner())) {

            // Recursion starts from the direct children of the table root.
            FileStatus[] fileStatuses = fileSystem.listStatus(new Path(tableMetaInfo.getTableFsPath()));

            // Accumulate sizes and timestamps into tableMetaInfo.
            getMetaFromHdfsRec(fileSystem, fileStatuses, tableMetaInfo);

            // Cluster-wide capacity snapshot for context.
            tableMetaInfo.setFsCapcitySize(fileSystem.getStatus().getCapacity());
            tableMetaInfo.setFsRemainSize(fileSystem.getStatus().getRemaining());
            tableMetaInfo.setFsUsedSize(fileSystem.getStatus().getUsed());
        }
    }

    /**
     * Depth-first walk over the table's directory tree. Directories are expanded
     * and recursed into; files contribute their length, replicated length, and
     * push the max modification/access time seen so far.
     *
     * <p>NOTE(review): assumes tableSize/tableTotalSize start non-null (0) on a
     * fresh TableMetaInfo — confirm the bean's field defaults.
     *
     * @param fileSystem    open HDFS handle used for directory listings
     * @param fileStatuses  the entries to process at this level
     * @param tableMetaInfo accumulator for sizes and timestamps
     * @throws IOException on listing failure
     */
    private void getMetaFromHdfsRec(FileSystem fileSystem, FileStatus[] fileStatuses,
                                    TableMetaInfo tableMetaInfo) throws IOException {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isDirectory()) {
                // Intermediate node: list children and recurse.
                FileStatus[] subFileStatus = fileSystem.listStatus(fileStatus.getPath());
                getMetaFromHdfsRec(fileSystem, subFileStatus, tableMetaInfo);
            } else {
                // Leaf node: a data file.
                long len = fileStatus.getLen();
                tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + len);
                // Physical footprint = logical size * HDFS replication factor.
                tableMetaInfo.setTableTotalSize(
                        tableMetaInfo.getTableTotalSize() + len * fileStatus.getReplication());

                // Track the latest modification time across all files.
                Date lastModifyDate = new Date(fileStatus.getModificationTime());
                if (tableMetaInfo.getTableLastModifyTime() == null
                        || tableMetaInfo.getTableLastModifyTime().compareTo(lastModifyDate) < 0) {
                    tableMetaInfo.setTableLastModifyTime(lastModifyDate);
                }

                // Track the latest access time across all files.
                Date lastAccessDate = new Date(fileStatus.getAccessTime());
                if (tableMetaInfo.getTableLastAccessTime() == null
                        || tableMetaInfo.getTableLastAccessTime().compareTo(lastAccessDate) < 0) {
                    tableMetaInfo.setTableLastAccessTime(lastAccessDate);
                }
            }
        }
    }

    /**
     * Fills the metastore-derived fields of {@code tableMetaInfo}: columns,
     * partition keys, owner, parameters, comment, location, IO formats, serde,
     * creation date, table type and bucketing info.
     *
     * @param tableMetaInfo supplies schema/table name, receives the results
     * @throws TException if the metastore call fails
     */
    private void extractMetaFromHive(TableMetaInfo tableMetaInfo) throws TException {
        Table table = iMetaStoreClient.getTable(tableMetaInfo.getSchemaName(), tableMetaInfo.getTableName());

        // Serialize only the interesting FieldSchema properties.
        SimplePropertyPreFilter simplePropertyPreFilter = new SimplePropertyPreFilter("name", "type", "comment");

        // Columns and partition columns as JSON.
        List<FieldSchema> cols = table.getSd().getCols();
        tableMetaInfo.setColNameJson(JSON.toJSONString(cols, simplePropertyPreFilter));
        tableMetaInfo.setPartitionColNameJson(JSON.toJSONString(table.getPartitionKeys(), simplePropertyPreFilter));

        // Owner doubles as the HDFS user for the later filesystem access.
        tableMetaInfo.setTableFsOwner(table.getOwner());

        // Raw table parameters as JSON; the "comment" parameter is also exposed directly.
        tableMetaInfo.setTableParametersJson(JSON.toJSONString(table.getParameters()));
        tableMetaInfo.setTableComment(table.getParameters().get("comment"));

        // Storage descriptor details.
        tableMetaInfo.setTableFsPath(table.getSd().getLocation());
        tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
        tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());
        tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());

        // The metastore stores create time in epoch seconds.
        Date createDate = new Date(table.getCreateTime() * 1000L);
        tableMetaInfo.setTableCreateTime(DateFormatUtils.format(createDate, "yyyy-MM-dd"));

        tableMetaInfo.setTableType(table.getTableType());

        // Bucketing info; explicit widening cast instead of the "+ 0L" trick.
        tableMetaInfo.setTableBucketNum((long) table.getSd().getNumBuckets());
        tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(table.getSd().getBucketCols(), simplePropertyPreFilter));
        tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols(), simplePropertyPreFilter));
    }

    /**
     * Pages through table metadata joined with its extra info in one SQL query
     * (avoids a per-table round trip). For each (schema, table) pair only the
     * row with the latest assess_date is kept. Optional fuzzy filters on table
     * name and schema name, exact filter on dw level, then LIMIT paging.
     *
     * <p>NOTE(review): values are concatenated into SQL after
     * {@code SqlUtil.filterUnsafeSql}; a parameterized query would be safer.
     *
     * @param tableMetaForQuery filter + paging criteria
     * @return one page of matching rows
     */
    @Override
    public List<TableMetaInfoVO> getTableMetaListForQuery(TableMetaForQuery tableMetaForQuery) {
        StringBuilder sqlBuilder = new StringBuilder(1000);
        // Correlated subquery keeps only the latest assessment per table.
        sqlBuilder.append("select ti.id ,ti.table_name,ti.schema_name,table_comment,table_size,table_total_size,tec_owner_user_name,busi_owner_user_name, table_last_access_time,table_last_modify_time from  table_meta_info  ti   join  table_meta_info_extra te  on ti.schema_name=te.schema_name\n" +
                "          and ti.table_name=te.table_name " +
                "       where   assess_date = (select max(assess_date) from table_meta_info ti2\n" +
                "                            where ti.table_name =ti2.table_name and ti.schema_name=ti2.schema_name  ) ");

        // Dynamic filters, each appended only when a usable value was supplied.
        if (tableMetaForQuery.getTableName() != null && tableMetaForQuery.getTableName().trim().length() > 0) {
            sqlBuilder.append(" and  ti.table_name  like  '%").append(SqlUtil.filterUnsafeSql(tableMetaForQuery.getTableName())).append("%'");
        }
        if (tableMetaForQuery.getSchemaName() != null && tableMetaForQuery.getSchemaName().trim().length() > 0) {
            sqlBuilder.append(" and  ti.schema_name  like  '%").append(SqlUtil.filterUnsafeSql(tableMetaForQuery.getSchemaName())).append("%'");
        }
        if (tableMetaForQuery.getDwLevel() != null && tableMetaForQuery.getDwLevel().trim().length() > 0) {
            sqlBuilder.append(" and  te.dw_level  =  '").append(SqlUtil.filterUnsafeSql(tableMetaForQuery.getDwLevel())).append("'");
        }

        // Paging: rowNo = (pageNo - 1) * pageSize. Fix: clamp pageNo to >= 1 so a
        // zero/negative page cannot produce an invalid negative LIMIT offset.
        int rowNo = (Math.max(tableMetaForQuery.getPageNo(), 1) - 1) * tableMetaForQuery.getPageSize();
        sqlBuilder.append(" limit ").append(rowNo).append(",").append(tableMetaForQuery.getPageSize());

        return baseMapper.getTableMetaInfoListForQuery(sqlBuilder.toString());
    }

    /**
     * Returns every table's metadata for {@code assessDate} with its nested
     * extra info populated. Uses a single joined query whose parent/child
     * mapping is configured in the mapper XML, instead of one extra query per
     * table.
     *
     * @param assessDate assessment date to load
     * @return metadata rows with their extra info attached
     */
    @Override
    public List<TableMetaInfo> getTableMetaInfoWithExtraList(String assessDate) {
        return baseMapper.getTableMetaInfoWithExtraList(assessDate);
    }
}
