package com.atguigu.dga.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.support.spring.PropertyPreFilters;
import com.atguigu.dga.common.SqlUtil;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.meta.bean.TableMetaInfoExtra;
import com.atguigu.dga.meta.bean.TableMetaInfoQuery;
import com.atguigu.dga.meta.bean.TableMetaInfoVO;
import com.atguigu.dga.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga.meta.service.TableMetaInfoExtraService;
import com.atguigu.dga.meta.service.TableMetaInfoService;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTOREURIS;

/**
 * <p>
 * Service implementation for table metadata ({@code table_meta_info}):
 * extracts per-table definitions from the Hive metastore and storage
 * statistics from HDFS, persists a dated snapshot of them, and serves
 * paged/filtered metadata queries for the frontend.
 * </p>
 *
 * @author zhangchen
 * @since 2023-06-27
 */
@Service
@DS("dga")
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {


    @Autowired
    TableMetaInfoExtraService tableMetaInfoExtraService;

    // Thrift client for the Hive metastore. Eagerly created in initHiveClient()
    // during container startup and reused for every metadata read.
    // NOTE(review): HiveMetaStoreClient is not documented as thread-safe --
    // confirm this service is not called concurrently, or guard the client.
    HiveMetaStoreClient hiveMetaStoreClient = null;  // eager initialization

    // Thrift URI of the Hive metastore (e.g. thrift://host:9083), from config.
    @Value("${hive.meta-server.url}")
    String hiveMetaStoreUri;


    /**
     * Builds the Hive metastore client once at container startup.
     * Runs after dependency injection ({@code @PostConstruct}), so
     * {@code hiveMetaStoreUri} is already populated from configuration.
     *
     * @throws RuntimeException if the metastore client cannot be created;
     *                          failing fast here is intentional because every
     *                          metadata extraction depends on this client
     */
    @PostConstruct  // executed by the container after bean construction/injection
    public void initHiveClient() {
        HiveConf hiveConf = new HiveConf();
        hiveConf.setVar(METASTOREURIS, hiveMetaStoreUri);
        try {
            hiveMetaStoreClient = new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            // Preserve the cause AND say which metastore we failed to reach;
            // the original bare RuntimeException(e) carried no context.
            throw new RuntimeException(
                    "Failed to create Hive metastore client for URI: " + hiveMetaStoreUri, e);
        }
    }


    /**
     * Snapshots metadata for every table of {@code schemaName} under the given
     * assessment date: clears any previous snapshot for that (date, schema)
     * pair, gathers Hive-side and HDFS-side metadata per table, persists the
     * batch, then triggers initialization of the auxiliary "extra" metadata.
     *
     * @param assessDate assessment date key, e.g. "2023-05-01"
     * @param schemaName Hive database name, e.g. "gmall"
     * @throws Exception on metastore or HDFS access failure
     */
    public void initTableMetaInfo(String assessDate, String schemaName) throws Exception {
        // Idempotency: drop rows already stored for this (date, schema) pair.
        remove(new QueryWrapper<TableMetaInfo>().eq("assess_date", assessDate).eq("schema_name", schemaName));

        // Every table name of the schema, straight from the Hive metastore.
        List<String> allTableNames = hiveMetaStoreClient.getAllTables(schemaName);

        List<TableMetaInfo> collected = new ArrayList<>(allTableNames.size());
        for (String name : allTableNames) {
            TableMetaInfo info = getTableMeta(schemaName, name);  // Hive-side metadata
            addHdfsInfo(info);                                    // HDFS-side statistics

            info.setAssessDate(assessDate);
            info.setCreateTime(new Date());
            collected.add(info);
        }

        saveOrUpdateBatch(collected);

        // Auxiliary (manually maintained) metadata rows are initialized last.
        tableMetaInfoExtraService.initTableMetaInfoExtra(collected, assessDate);
    }


    /**
     * Enriches a table's metadata with HDFS statistics: accumulated data size,
     * replicated size and latest modify/access times over the table directory
     * tree, plus filesystem-level capacity/remaining/used figures.
     *
     * NOTE(review): the FileSystem instance is obtained via FileSystem.get(...),
     * which returns a cached, shared instance per (scheme, authority, user), so
     * it is deliberately not closed here -- closing would invalidate the cache
     * for other callers. Confirm this matches the deployment's expectations.
     *
     * @param tableMetaInfo row whose table_fs_path / table_fs_owner are set
     * @throws Exception on HDFS access failure or a malformed path URI
     */
    private void addHdfsInfo(TableMetaInfo tableMetaInfo) throws Exception {
        String tableFsPath = tableMetaInfo.getTableFsPath();
        String tableFsOwner = tableMetaInfo.getTableFsOwner();
        // Access HDFS as the table's owner so permission checks match Hive's.
        FileSystem fileSystem = FileSystem.get(new URI(tableFsPath), new Configuration(), tableFsOwner);

        // Walk the table directory (if present) and accumulate per-file stats.
        Path tablePath = new Path(tableFsPath);
        if (fileSystem.exists(tablePath)) {
            FileStatus[] fileStatuses = fileSystem.listStatus(tablePath);
            addHdfsInfoRec(tableMetaInfo, fileStatuses, fileSystem);
        }

        // getStatus() is a remote call; the original invoked it three times.
        // Fetch the status once and read all three figures from it.
        FsStatus fsStatus = fileSystem.getStatus();
        tableMetaInfo.setFsCapcitySize(fsStatus.getCapacity());
        tableMetaInfo.setFsRemainSize(fsStatus.getRemaining());
        tableMetaInfo.setFsUsedSize(fsStatus.getUsed());

        // NOTE(review): debug leftover -- consider replacing with a logger.
        System.out.println("tableMetaInfo = " + tableMetaInfo.getTableName() + ":" + tableMetaInfo.getTableSize());

    }


    /**
     * Depth-first walk over a table's HDFS file tree.
     *
     * Leaf nodes (files): add their length to table_size, their replicated
     * length to table_total_size, and roll the newest modification/access
     * timestamps forward. Intermediate nodes (directories): no accumulation,
     * just list their children and recurse.
     *
     * NOTE(review): assumes getTableSize()/getTableTotalSize() start non-null
     * (e.g. defaulted to 0 in the bean) -- confirm against TableMetaInfo.
     */
    private void addHdfsInfoRec(TableMetaInfo tableMetaInfo, FileStatus[] fileStatuses, FileSystem fileSystem) throws Exception {
        for (FileStatus status : fileStatuses) {
            if (!status.isFile()) {
                // Directory: nothing to record here, descend into children.
                addHdfsInfoRec(tableMetaInfo, fileSystem.listStatus(status.getPath()), fileSystem);
                continue;
            }

            // File: accumulate raw and replicated sizes.
            long fileLen = status.getLen();
            tableMetaInfo.setTableSize(tableMetaInfo.getTableSize() + fileLen);
            tableMetaInfo.setTableTotalSize(tableMetaInfo.getTableTotalSize() + fileLen * status.getReplication());

            // Keep the latest modification timestamp seen so far.
            Date prevModify = tableMetaInfo.getTableLastModifyTime();
            long modifyTs = (prevModify == null)
                    ? status.getModificationTime()
                    : Math.max(prevModify.getTime(), status.getModificationTime());
            tableMetaInfo.setTableLastModifyTime(new Date(modifyTs));

            // Keep the latest access timestamp seen so far.
            Date prevAccess = tableMetaInfo.getTableLastAccessTime();
            long accessTs = (prevAccess == null)
                    ? status.getAccessTime()
                    : Math.max(prevAccess.getTime(), status.getAccessTime());
            tableMetaInfo.setTableLastAccessTime(new Date(accessTs));
        }
    }


    // The test approach below is not viable: run outside the container there is
    // no startup lifecycle, so the injected beans and the @PostConstruct
    // initialization this service depends on never exist.
//    public static void main(String[] args) throws MetaException {
//         //new TableMetaInfoServiceImpl().initTableMetaInfo("2023-05-01","gmall");
//    }


    /**
     * Reads one table's definition from the Hive metastore and maps it onto a
     * new {@link TableMetaInfo} row: columns/partitions as JSON, owner, HDFS
     * location, storage formats, create time, table type and bucketing.
     *
     * @param schemaName Hive database name
     * @param tableName  table name inside that database
     * @return freshly populated metadata bean (not yet persisted)
     * @throws Exception if the metastore lookup fails
     */
    public TableMetaInfo getTableMeta(String schemaName, String tableName) throws Exception {
        // 1. Fetch the Thrift Table object from the metastore.
        Table table = hiveMetaStoreClient.getTable(schemaName, tableName);

        // 2. Map it onto the application's own bean.
        TableMetaInfo tableMetaInfo = new TableMetaInfo();
        tableMetaInfo.setTableName(tableName);
        tableMetaInfo.setSchemaName(schemaName);

        // 2.1 Columns as JSON; the property filter keeps only name/type/comment
        //     so the stored JSON stays compact and schema-stable.
        List<FieldSchema> colList = table.getSd().getCols();
        PropertyPreFilters.MySimplePropertyPreFilter preFilter =
                new PropertyPreFilters().addFilter("name", "type", "comment");
        String colJson = JSON.toJSONString(colList, preFilter);
        tableMetaInfo.setColNameJson(colJson);

        // 2.2 Partition columns, same JSON shape as regular columns.
        List<FieldSchema> partitionList = table.getPartitionKeys();
        String partitionJson = JSON.toJSONString(partitionList, preFilter);
        tableMetaInfo.setPartitionColNameJson(partitionJson);

        // 2.3 Owner (also used later as the HDFS access user in addHdfsInfo).
        tableMetaInfo.setTableFsOwner(table.getOwner());

        // 2.4 Raw table parameters, stored verbatim as JSON.
        tableMetaInfo.setTableParametersJson(JSON.toJSONString(table.getParameters()));

        // 2.5 Table comment lives inside the parameters map under "comment";
        //     it may be absent, in which case null is stored.
        tableMetaInfo.setTableComment(table.getParameters().get("comment"));

        // 2.6 HDFS location of the table's data.
        tableMetaInfo.setTableFsPath(table.getSd().getLocation());

        // 2.7 Storage formats (input/output format classes and the SerDe).
        tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
        tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());
        tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());

        // 2.8 Create time: the metastore stores epoch seconds, Date wants millis.
        Date createDate = new Date(table.getCreateTime() * 1000L);
        String createDateString = DateFormatUtils.format(createDate, "yyyy-MM-dd HH:mm:ss");
        tableMetaInfo.setTableCreateTime(createDateString);

        // 2.9 Table type string as reported by the metastore
        //     (e.g. MANAGED_TABLE / EXTERNAL_TABLE / VIRTUAL_VIEW).
        tableMetaInfo.setTableType(table.getTableType());

        // 2.10 Bucketing: explicit widening cast instead of the "+ 0L" trick.
        tableMetaInfo.setTableBucketNum((long) table.getSd().getNumBuckets());
        tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(table.getSd().getBucketCols()));
        tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols()));

        // NOTE(review): debug leftover -- consider replacing with a logger.
        System.out.println("tableMetaInfo = " + tableMetaInfo);

        return tableMetaInfo;
    }

//    @Autowired
//    TableMetaInfoMapper tableMetaInfoMapper;

    /**
     * Paged metadata listing for the frontend. Joins table_meta_info with
     * table_meta_info_extra, restricted per table to its most recent
     * assess_date, with optional fuzzy filters on schema/table name and an
     * exact filter on the warehouse level (dw_level).
     *
     * NOTE(review): the SQL is assembled by string concatenation. User input
     * passes through SqlUtil.filterUnsafeSql, but a parameterized query would
     * be the safer long-term fix; also '%'/'_' in the input are not escaped
     * for LIKE.
     *
     * @param tableMetaInfoQuery filters plus pageNo (1-based) and pageSize
     * @return one page of rows for the listing view
     */
    public List<TableMetaInfoVO> getTableMetaList(TableMetaInfoQuery tableMetaInfoQuery) {

        StringBuilder sqlSB = new StringBuilder();
        sqlSB.append("select tm.id ,tm.table_name,tm.schema_name,table_comment,table_size,table_total_size,tec_owner_user_name,busi_owner_user_name, table_last_access_time,table_last_modify_time")
                .append(" from table_meta_info tm  join table_meta_info_extra te")
                .append(" on tm.table_name = te.table_name and tm.schema_name=te.schema_name ")
                // Latest snapshot per (schema, table): correlated max(assess_date).
                .append(" where tm.assess_date = (select max(assess_date) from table_meta_info tmi  where tmi.table_name=tm.table_name  and tmi.schema_name=tm.schema_name)  ");

        if (tableMetaInfoQuery.getSchemaName() != null && !tableMetaInfoQuery.getSchemaName().trim().isEmpty()) {
            sqlSB.append(" and  tm.schema_name like '%" + SqlUtil.filterUnsafeSql(tableMetaInfoQuery.getSchemaName()) + "%'");
        }
        if (tableMetaInfoQuery.getTableName() != null && !tableMetaInfoQuery.getTableName().trim().isEmpty()) {
            sqlSB.append(" and  tm.table_name like '%" + SqlUtil.filterUnsafeSql(tableMetaInfoQuery.getTableName()) + "%'");
        }
        if (tableMetaInfoQuery.getDwLevel() != null && !tableMetaInfoQuery.getDwLevel().trim().isEmpty()) {
            sqlSB.append(" and  te.dw_level = '" + SqlUtil.filterUnsafeSql(tableMetaInfoQuery.getDwLevel()) + "'");
        }

        // Offset = (pageNo - 1) * pageSize. Clamp at zero so a pageNo of 0 (or
        // less) cannot produce a negative LIMIT offset and invalid SQL.
        int limitRowNo = Math.max(0, (tableMetaInfoQuery.getPageNo() - 1) * tableMetaInfoQuery.getPageSize());
        int pageSize = tableMetaInfoQuery.getPageSize();
        sqlSB.append(" limit " + limitRowNo + "," + pageSize);

        return this.baseMapper.selectTableMetaListPage(sqlSB.toString());
    }

    /**
     * All metadata rows recorded for the given assessment date
     * (no paging, no filters).
     *
     * @param assessDate assessment date key
     * @return every table_meta_info row for that date
     */
    @Override
    public List<TableMetaInfo> getTableMetaInfoAllList(String assessDate) {
        return baseMapper.selectTableMetaAllList(assessDate);
    }

    /**
     * Row count over the same join and filters as the paged listing query,
     * used by the frontend to size pagination.
     *
     * @param tableMetaInfoQuery the same filter bean the listing query uses
     * @return number of rows matching the filters
     */
    public Integer getTableMetaTotal(TableMetaInfoQuery tableMetaInfoQuery) {

        StringBuilder countSql = new StringBuilder()
                .append("select  count(*) ct")
                .append(" from table_meta_info tm  join table_meta_info_extra te")
                .append(" on tm.table_name = te.table_name and tm.schema_name=te.schema_name ")
                // Restrict each table to its most recent assess_date snapshot.
                .append(" where tm.assess_date = (select max(assess_date) from table_meta_info tmi  where tmi.table_name=tm.table_name  and tmi.schema_name=tm.schema_name)  ");

        String schemaName = tableMetaInfoQuery.getSchemaName();
        if (schemaName != null && schemaName.trim().length() > 0) {
            countSql.append(" and  tm.schema_name like '%" + SqlUtil.filterUnsafeSql(schemaName) + "%'");
        }
        String tableName = tableMetaInfoQuery.getTableName();
        if (tableName != null && tableName.trim().length() > 0) {
            countSql.append(" and  tm.table_name like '%" + SqlUtil.filterUnsafeSql(tableName) + "%'");
        }
        String dwLevel = tableMetaInfoQuery.getDwLevel();
        if (dwLevel != null && dwLevel.trim().length() > 0) {
            countSql.append(" and  te.dw_level = '" + SqlUtil.filterUnsafeSql(dwLevel) + "'");
        }

        return baseMapper.selectTableMetaCount(countSql.toString());
    }
}