package com.atguigu.dga.meta.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.support.spring.PropertyPreFilters;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.meta.mapper.TableMetaInfoMapper;
import com.atguigu.dga.meta.service.TableMetaInfoService;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;


import java.util.Date;
import java.util.List;
import java.util.Map;


/**
 * <p>
 * 元数据表 服务实现类
 * </p>
 *
 * @author hzh
 * @since 2023-11-15
 */
@Service
public class TableMetaInfoServiceImpl extends ServiceImpl<TableMetaInfoMapper, TableMetaInfo> implements TableMetaInfoService {

    /** Thrift URI of the Hive metastore (e.g. thrift://host:9083), injected from configuration. */
    @Value("${hive.metaserver.url}")
    String hiveMetaServerUrl = null;

    // Shared Hive metastore client, created lazily.
    // BUG FIX: the original built this in a field initializer, which runs BEFORE
    // Spring performs @Value injection, so hiveMetaServerUrl was still null when
    // the client was constructed. Deferring creation to first use guarantees the
    // injected URL is available.
    IMetaStoreClient hiveClient;

    /**
     * Returns the shared Hive metastore client, creating it on first use.
     * Synchronized so concurrent first calls do not build duplicate clients.
     *
     * @throws RuntimeException wrapping {@link MetaException} if the client cannot be created
     */
    private synchronized IMetaStoreClient getHiveClient() {
        if (hiveClient == null) {
            HiveConf hiveConf = new HiveConf();
            MetastoreConf.setVar(hiveConf, MetastoreConf.ConfVars.THRIFT_URIS,
                    hiveMetaServerUrl);
            try {
                hiveClient = new HiveMetaStoreClient(hiveConf);
            } catch (MetaException e) {
                throw new RuntimeException("Failed to create Hive metastore client for " + hiveMetaServerUrl, e);
            }
        }
        return hiveClient;
    }

    /**
     * Lists the names of all tables in the given Hive database.
     *
     * @param databaseName Hive database (schema) name
     * @return names of all tables in that database
     * @throws RuntimeException wrapping {@link TException} on metastore communication failure
     */
    public List<String> getTableNameList(String databaseName) {
        try {
            return getHiveClient().getAllTables(databaseName);
        } catch (TException e) {
            throw new RuntimeException("Failed to list tables of database " + databaseName, e);
        }
    }

    /**
     * Reads one table's definition from the Hive metastore and maps it into a
     * {@link TableMetaInfo} record (columns, storage formats, partitioning,
     * bucketing, and miscellaneous parameters).
     *
     * @param databaseName Hive database (schema) name
     * @param tableName    table name within that database
     * @return a populated, not-yet-persisted TableMetaInfo
     * @throws RuntimeException wrapping {@link TException} on metastore communication failure
     */
    @Override
    public TableMetaInfo getTableMetaInfoFromHive(String databaseName, String tableName) {
        TableMetaInfo tableMetaInfo = new TableMetaInfo();
        try {
            Table table = getHiveClient().getTable(databaseName, tableName);
            tableMetaInfo.setTableName(tableName);
            tableMetaInfo.setSchemaName(databaseName);

            // Column definitions vary widely per table, so store them as one JSON
            // array, keeping only the name/type/comment properties of each FieldSchema.
            List<FieldSchema> fieldSchemaList = table.getSd().getCols();
            PropertyPreFilters.MySimplePropertyPreFilter filter = new PropertyPreFilters()
                    .addFilter("comment", "name", "type");
            String colNameJson = JSON.toJSONString(fieldSchemaList, filter);
            tableMetaInfo.setColNameJson(colNameJson);

            // Table type: EXTERNAL_TABLE / MANAGED_TABLE.
            // BUG FIX: the original wrote the value back onto `table`
            // (table.setTableType(table.getTableType())) — a no-op that never
            // copied the type into tableMetaInfo.
            tableMetaInfo.setTableType(table.getTableType());

            // The table comment lives in the table parameters map under "comment".
            Map<String, String> parameters = table.getParameters();
            tableMetaInfo.setTableComment(parameters.get("comment"));

            // Owner of the table's filesystem directory.
            tableMetaInfo.setTableFsOwner(table.getOwner());

            // HDFS storage location.
            tableMetaInfo.setTableFsPath(table.getSd().getLocation());

            // Input format.
            tableMetaInfo.setTableInputFormat(table.getSd().getInputFormat());
            // Output format.
            // BUG FIX: the original copy-pasted getInputFormat() here as well.
            tableMetaInfo.setTableOutputFormat(table.getSd().getOutputFormat());

            // SerDe class used for row (de)serialization.
            tableMetaInfo.setTableRowFormatSerde(table.getSd().getSerdeInfo().getSerializationLib());

            // Creation time: the metastore stores epoch SECONDS; widen to millis.
            // NOTE(review): String.valueOf(Date) yields the default
            // "EEE MMM dd HH:mm:ss zzz yyyy" form — kept byte-compatible with the
            // original; confirm downstream consumers do not expect yyyy-MM-dd.
            tableMetaInfo.setTableCreateTime(String.valueOf(new Date(table.getCreateTime() * 1000L)));

            // Partition columns, serialized with the same name/type/comment filter.
            List<FieldSchema> partitionKeys = table.getPartitionKeys();
            String partitionColJson = JSON.toJSONString(partitionKeys, filter);
            tableMetaInfo.setPartitionColNameJson(partitionColJson);

            // Bucketing: record bucket and sort columns only when the table is bucketed.
            tableMetaInfo.setTableBucketNum(table.getSd().getNumBuckets() + 0L);
            if (tableMetaInfo.getTableBucketNum() > 0) {
                List<String> bucketCols = table.getSd().getBucketCols();
                tableMetaInfo.setTableBucketColsJson(JSON.toJSONString(bucketCols));
                tableMetaInfo.setTableSortColsJson(JSON.toJSONString(table.getSd().getSortCols()));
            }

            // All remaining table parameters, kept verbatim as JSON.
            tableMetaInfo.setTableParametersJson(JSON.toJSONString(table.getParameters()));
        } catch (TException e) {
            throw new RuntimeException(
                    "Failed to read metadata of " + databaseName + "." + tableName + " from Hive metastore", e);
        }

        return tableMetaInfo;
    }

    /**
     * Supplements the given record with HDFS-level information for its storage path.
     *
     * TODO(review): implementation is incomplete — it only reads the path and
     * populates nothing; query HDFS at {@code tableFsPath} and fill in the
     * corresponding TableMetaInfo fields.
     */
    @Override
    public void addHdfsInfo(TableMetaInfo tableMetaInfo) {
        String tableFsPath = tableMetaInfo.getTableFsPath();
    }
}
