package com.atguigu.dwm.common.component;


import com.atguigu.dwm.common.constants.CommonCodes;
import com.atguigu.dwm.common.constants.TableParams;
import com.atguigu.dwm.model.bean.DmTable;
import com.atguigu.dwm.model.bean.DmTableColumn;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.*;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Component
public class TableHiveProcessor {

    // Thrift client for the Hive metastore; created once in initHiveClient().
    IMetaStoreClient hiveMetaClient = null;

    // Hive metastore thrift URI, e.g. "thrift://host:9083".
    @Value("${hive.metastore.url}")
    String hiveMetastoreUrl;

    // OS user the Hadoop libraries impersonate for HDFS/metastore operations.
    @Value("${hadoop.username}")
    String hadoopUser;

    // HDFS root under which table locations are built: <root>/<db>/<table>.
    @Value("${warehouse.root.path}")
    String warehouseRootPath;

    /**
     * Creates the metastore client after Spring property injection.
     *
     * @throws RuntimeException wrapping the {@link MetaException} when the
     *         client cannot be created / cannot connect
     */
    @PostConstruct
    public void initHiveClient() {
        // HADOOP_USER_NAME controls which user the Hadoop client libraries act as.
        System.setProperty("HADOOP_USER_NAME", hadoopUser);
        HiveConf hiveConf = new HiveConf();
        hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, hiveMetastoreUrl);
        try {
            hiveMetaClient = new HiveMetaStoreClient(hiveConf);
        } catch (MetaException e) {
            // Add connection context; the cause is preserved.
            throw new RuntimeException("Failed to connect to Hive metastore at " + hiveMetastoreUrl, e);
        }
    }

    /**
     * Creates a database in Hive.
     *
     * @param databaseName name of the database to create
     * @throws TException if the metastore call fails (e.g. database already exists)
     */
    public void createDataBaseToHive(String databaseName) throws TException {
        Database database = new Database();
        database.setName(databaseName);
        hiveMetaClient.createDatabase(database);
    }

    /**
     * (Re)creates a Hive table from the model definition.
     * NOTE: any existing table of the same name is dropped first, so this is
     * destructive for the metadata; the table is created EXTERNAL, so the HDFS
     * data itself is not removed by the drop.
     *
     * @param dmTable model describing schema, columns, partitions, format and compression
     * @throws Exception if the metastore drop/create fails
     */
    public void createTable(DmTable dmTable) throws Exception {
        Table table = extractHiveTable(dmTable);
        // Drop any previous definition before re-creating it.
        hiveMetaClient.dropTable(dmTable.getSchemaName(), dmTable.getTableName());
        hiveMetaClient.createTable(table);
    }

    /** Maps the DmTable model onto a thrift {@link Table} ready for createTable. */
    private Table extractHiveTable(DmTable dmTable) {
        Table table = new Table();
        // Identity: database, name, owner; always created as an external table.
        table.setDbName(dmTable.getSchemaName());
        table.setTableName(dmTable.getTableName());
        table.setTableType("EXTERNAL_TABLE");
        table.setOwner(hadoopUser);

        // Table parameters: compression settings and table comment.
        table.setParameters(this.getParameters(dmTable));

        // Partition columns.
        table.setPartitionKeys(getPartitionKeys(dmTable.getPartitionColumns()));

        // Storage descriptor: regular columns, location, I/O formats, serde.
        StorageDescriptor storageDescriptor = new StorageDescriptor();
        storageDescriptor.setCols(this.getColumns(dmTable.getTableColumns()));
        storageDescriptor.setLocation(warehouseRootPath + "/" + dmTable.getSchemaName() + "/" + dmTable.getTableName());
        storageDescriptor.setOutputFormat(this.getOutputFormat(dmTable));
        storageDescriptor.setInputFormat(this.getInputFormat(dmTable.getStorageFormat()));
        storageDescriptor.setSerdeInfo(this.getSerdeInfo(dmTable));
        table.setSd(storageDescriptor);

        return table;
    }

    /**
     * Builds the table parameter map: compression properties keyed by storage
     * format, plus the table comment. Null-safe on compress type / storage format
     * (constant-first equals).
     */
    private Map<String, String> getParameters(DmTable dmTable) {
        Map<String, String> parameters = new HashMap<>();
        String compressType = dmTable.getCompressType();
        String storageFormat = dmTable.getStorageFormat();
        if (CommonCodes.COMPRESS_TYPE_GZIP.equals(compressType)) {
            // e.g. "compression.codec" -> "org.apache.hadoop.io.compress.GzipCodec"
            parameters.put("compression.codec", TableParams.COMPRESS_TYPE_GZIP);
        } else if (CommonCodes.COMPRESS_TYPE_SNAPPY.equals(compressType)
                && CommonCodes.STORAGE_FORMAT_ORC.equals(storageFormat)) {
            parameters.put("orc.compress", TableParams.COMPRESS_TYPE_SNAPPY_SHORT);
        } else if (CommonCodes.COMPRESS_TYPE_SNAPPY.equals(compressType)
                && CommonCodes.STORAGE_FORMAT_PARQUET.equals(storageFormat)) {
            // BUGFIX: "parquet.compression" is the table property Hive's Parquet
            // integration actually reads; the previous key "parquet.compress"
            // was never honored and did not match getCompressTypeFromHive().
            parameters.put("parquet.compression", TableParams.COMPRESS_TYPE_SNAPPY_SHORT);
        }
        parameters.put("comment", dmTable.getTableNameChn());
        return parameters;
    }

    /**
     * Maps a model compression-type code to its codec constant.
     * Returns null for NONE or unrecognized codes.
     * NOTE(review): currently unused within this class — candidate for removal.
     */
    private String getCompressionCodec(String compressionType) {
        switch (compressionType) {
            case CommonCodes.COMPRESS_TYPE_GZIP:
                return TableParams.COMPRESS_TYPE_GZIP;
            case CommonCodes.COMPRESS_TYPE_SNAPPY:
                return TableParams.COMPRESS_TYPE_SNAPPY;
            case CommonCodes.COMPRESS_TYPE_NONE:
                return null;
        }
        return null;
    }

    /** Output format class for the table's storage format; null when unknown/unset. */
    private String getOutputFormat(DmTable dmTable) {
        String storageFormat = dmTable.getStorageFormat();
        if (storageFormat == null) {
            return null;
        }
        switch (storageFormat) {
            case CommonCodes.STORAGE_FORMAT_ORC:
                return TableParams.ORC_OUTPUT_FORMAT;
            case CommonCodes.STORAGE_FORMAT_PARQUET:
                return TableParams.PARQUET_OUTPUT_FORMAT;
            case CommonCodes.STORAGE_FORMAT_TEXT_TAB:
            case CommonCodes.STORAGE_FORMAT_TEXT_JSON:
                // Both text variants share the plain text output format.
                return TableParams.TEXT_OUTPUT_FORMAT;
        }
        return null;
    }

    /** Input format class for the given storage format code; null when unknown/unset. */
    private String getInputFormat(String storageFormat) {
        if (storageFormat == null) {
            return null;
        }
        switch (storageFormat) {
            case CommonCodes.STORAGE_FORMAT_PARQUET:
                return TableParams.PARQUET_INPUT_FORMAT;
            case CommonCodes.STORAGE_FORMAT_ORC:
                return TableParams.ORC_INPUT_FORMAT;
            case CommonCodes.STORAGE_FORMAT_TEXT_JSON:
            case CommonCodes.STORAGE_FORMAT_TEXT_TAB:
                // Both text variants share the plain text input format.
                return TableParams.TEXT_INPUT_FORMAT;
        }
        return null;
    }

    /**
     * Builds serde info for the table: serialization library per storage format,
     * tab delimiters for tab-separated text, and the optional NULL representation.
     */
    private SerDeInfo getSerdeInfo(DmTable dmTable) {
        SerDeInfo serDeInfo = new SerDeInfo();
        serDeInfo.setParameters(new HashMap<>());
        String storageFormat = dmTable.getStorageFormat();
        if (storageFormat != null) {
            switch (storageFormat) {
                case CommonCodes.STORAGE_FORMAT_PARQUET:
                    serDeInfo.setSerializationLib(TableParams.SERDE_CLASS_PARQUET);
                    break;
                case CommonCodes.STORAGE_FORMAT_TEXT_JSON:
                    serDeInfo.setSerializationLib(TableParams.SERDE_CLASS_JSON);
                    break;
                case CommonCodes.STORAGE_FORMAT_TEXT_TAB:
                    serDeInfo.setSerializationLib(TableParams.SERDE_CLASS_TEXT);
                    serDeInfo.getParameters().put("field.delim", "\t");          // read side
                    serDeInfo.getParameters().put("serialization.format", "\t"); // write side
                    break;
                case CommonCodes.STORAGE_FORMAT_ORC:
                    serDeInfo.setSerializationLib(TableParams.SERDE_CLASS_ORC);
                    break;
            }
        }
        // Optional custom NULL representation; surrounding quotes are stripped,
        // so "''" becomes the empty string.
        String nullDefined = dmTable.getNullDefined();
        if (nullDefined != null && nullDefined.trim().length() > 0) {
            serDeInfo.getParameters().put("serialization.null.format", nullDefined.replace("'", ""));
        }
        return serDeInfo;
    }

    /** Regular (non-partition) columns as FieldSchemas; empty list when none. */
    private List<FieldSchema> getColumns(List<DmTableColumn> tableColumns) {
        return toFieldSchemas(tableColumns);
    }

    /** Partition columns as FieldSchemas; empty list when none. */
    private List<FieldSchema> getPartitionKeys(List<DmTableColumn> partitionColumns) {
        return toFieldSchemas(partitionColumns);
    }

    /** Shared DmTableColumn -> FieldSchema conversion (name, hive type, comment). */
    private List<FieldSchema> toFieldSchemas(List<DmTableColumn> columns) {
        List<FieldSchema> list = new ArrayList<>();
        if (columns != null) {
            for (DmTableColumn dmTableColumn : columns) {
                FieldSchema fieldSchema = new FieldSchema();
                fieldSchema.setName(dmTableColumn.getColumnName());
                fieldSchema.setType(getHiveDataType(dmTableColumn.getDataType()));
                fieldSchema.setComment(dmTableColumn.getColumnComment());
                list.add(fieldSchema);
            }
        }
        return list;
    }

    /**
     * Maps a model data-type code to the Hive column type name.
     * Unknown or null codes fall back to "string".
     */
    public static String getHiveDataType(String dataType) {
        if (dataType == null) {
            return "string";
        }
        switch (dataType) {
            case CommonCodes.DATA_TYPE_STRING:
                return "string";
            case CommonCodes.DATA_TYPE_DECIMAL:
                // BUGFIX: was "decimall" — not a valid Hive type, and it broke the
                // round-trip with getDataTypeCodeByHiveDataType("decimal").
                return "decimal";
            case CommonCodes.DATA_TYPE_DECIMAL_16_2:
                return "decimal(16,2)";
            case CommonCodes.DATA_TYPE_BIGINT:
                return "bigint";
        }
        return "string";
    }

    /** All database names known to the metastore. */
    public List<String> getDatabaseNameList() throws Exception {
        return hiveMetaClient.getAllDatabases();
    }

    /** All table names in the given database. */
    public List<String> getTableNameList(String databaseName) throws Exception {
        return hiveMetaClient.getAllTables(databaseName);
    }

    /** Thrift Table object for the given database/table. */
    public Table getTable(String databaseName, String tableName) throws Exception {
        return hiveMetaClient.getTable(databaseName, tableName);
    }

    /**
     * Pulls the table's metadata from Hive and copies it onto the given DmTable:
     * compression type, table comment, partition/regular columns, storage format,
     * NULL representation, and warehouse layer (inferred from the name prefix).
     *
     * @param dmTable model whose schemaName/tableName identify the Hive table;
     *                its metadata fields are overwritten in place
     * @throws TException if the metastore lookup fails
     */
    public void syncTableMeta(DmTable dmTable) throws TException {
        // 1. Fetch the thrift Table from the metastore.
        Table table = hiveMetaClient.getTable(dmTable.getSchemaName(), dmTable.getTableName());

        // 2. Copy its metadata onto the model object.
        dmTable.setCompressType(getCompressTypeFromHive(table));
        dmTable.setTableNameChn(getTableCommentFromHive(table));
        dmTable.setPartitionColumns(getColumnsFromHive(table.getPartitionKeys(), true));
        dmTable.setTableColumns(getColumnsFromHive(table.getSd().getCols(), false));
        dmTable.setStorageFormat(parseStorageFormat(table.getSd()));
        dmTable.setNullDefined(getNullDefined(table.getSd()));
        dmTable.setDwLevel(getDwLevel(table.getTableName()));

        // Further attributes (storage policy, statistics period, data domain,
        // business process, dimensions) could also be inferred from the name.
    }

    /**
     * Infers the warehouse layer (ODS/DWD/DWS/ADS/DIM) from the table-name prefix;
     * null when no prefix matches.
     */
    private String getDwLevel(String tableName) {
        if (tableName == null) {
            return null;
        }
        if (tableName.startsWith("ods_")) {
            return CommonCodes.DW_LEVEL_ODS;
        } else if (tableName.startsWith("dwd_")) {
            return CommonCodes.DW_LEVEL_DWD;
        } else if (tableName.startsWith("dws_")) {
            return CommonCodes.DW_LEVEL_DWS;
        } else if (tableName.startsWith("ads_")) {
            return CommonCodes.DW_LEVEL_ADS;
        } else if (tableName.startsWith("dim_")) {
            return CommonCodes.DW_LEVEL_DIM;
        }
        return null;
    }

    /**
     * Reads the table's NULL representation back from the serde parameters.
     * The empty string is rendered as "''" (matching how getSerdeInfo strips
     * quotes on the way in); an absent parameter yields null.
     */
    private String getNullDefined(StorageDescriptor sd) {
        Map<String, String> params = sd.getSerdeInfo().getParameters();
        String nullString = params == null ? null : params.get("serialization.null.format");
        if (nullString == null) {
            return null;
        }
        return nullString.isEmpty() ? "''" : nullString;
    }

    /**
     * Derives the model storage-format code from the input format plus, for text
     * tables, the serde class (JSON vs. tab-delimited). Null when unrecognized.
     * Constant-first equals keeps this null-safe.
     */
    private String parseStorageFormat(StorageDescriptor sd) {
        String inputFormat = sd.getInputFormat();
        if (TableParams.PARQUET_INPUT_FORMAT.equals(inputFormat)) {
            return CommonCodes.STORAGE_FORMAT_PARQUET;
        } else if (TableParams.ORC_INPUT_FORMAT.equals(inputFormat)) {
            return CommonCodes.STORAGE_FORMAT_ORC;
        } else if (TableParams.TEXT_INPUT_FORMAT.equals(inputFormat)) {
            if (TableParams.SERDE_CLASS_JSON.equals(sd.getSerdeInfo().getSerializationLib())) {
                return CommonCodes.STORAGE_FORMAT_TEXT_JSON;
            }
            return CommonCodes.STORAGE_FORMAT_TEXT_TAB;
        }
        return null;
    }

    /**
     * Converts Hive FieldSchemas to model columns.
     * Returns null (not an empty list) for a null/empty input, preserving the
     * existing contract of syncTableMeta's setters.
     */
    private List<DmTableColumn> getColumnsFromHive(List<FieldSchema> fieldSchemaList, boolean isPartitionCol) {
        if (fieldSchemaList == null || fieldSchemaList.isEmpty()) {
            return null;
        }
        List<DmTableColumn> dmTableColumnList = new ArrayList<>(fieldSchemaList.size());
        for (FieldSchema fieldSchema : fieldSchemaList) {
            DmTableColumn dmTableColumn = new DmTableColumn();
            dmTableColumn.setColumnName(fieldSchema.getName());
            dmTableColumn.setDataType(getDataTypeCodeByHiveDataType(fieldSchema.getType()));
            dmTableColumn.setColumnComment(fieldSchema.getComment());
            dmTableColumn.setIsPartitionCol(isPartitionCol ? "1" : "0");
            dmTableColumnList.add(dmTableColumn);
        }
        return dmTableColumnList;
    }

    /** Maps a Hive column type back to a model data-type code; unknown/null maps to STRING. */
    private String getDataTypeCodeByHiveDataType(String type) {
        if (type == null) {
            return CommonCodes.DATA_TYPE_STRING;
        }
        switch (type) {
            case "string":
                return CommonCodes.DATA_TYPE_STRING;
            case "decimal":
                return CommonCodes.DATA_TYPE_DECIMAL;
            case "decimal(16,2)":
                return CommonCodes.DATA_TYPE_DECIMAL_16_2;
            case "bigint":
                return CommonCodes.DATA_TYPE_BIGINT;
        }
        return CommonCodes.DATA_TYPE_STRING;
    }

    /** Table comment ("comment" parameter); null when absent. */
    private String getTableCommentFromHive(Table table) {
        Map<String, String> parameters = table.getParameters();
        return parameters == null ? null : parameters.get("comment");
    }

    /**
     * Infers the model compression-type code from the table parameters:
     * a gzip codec in "compression.codec" means GZIP; a snappy value in
     * "orc.compress" or "parquet.compression" (or the legacy "parquet.compress"
     * written by older versions of this class) means SNAPPY; otherwise NONE.
     */
    private String getCompressTypeFromHive(Table table) {
        Map<String, String> parameters = table.getParameters();
        if (parameters == null) {
            return CommonCodes.COMPRESS_TYPE_NONE;
        }
        String codec = parameters.get("compression.codec");
        if (codec != null && codec.toLowerCase().contains("gzip")) {
            return CommonCodes.COMPRESS_TYPE_GZIP;
        }
        // BUGFIX: the old code tested containsKey("parquet.compression") but then
        // read "parquet.compress", which could NPE and never matched the key this
        // class itself wrote. Read each candidate key directly and null-check.
        String snappyCodec = parameters.get("orc.compress");
        if (snappyCodec == null) {
            snappyCodec = parameters.get("parquet.compression");
        }
        if (snappyCodec == null) {
            snappyCodec = parameters.get("parquet.compress"); // legacy key
        }
        if (snappyCodec != null && snappyCodec.toLowerCase().contains("snappy")) {
            return CommonCodes.COMPRESS_TYPE_SNAPPY;
        }
        return CommonCodes.COMPRESS_TYPE_NONE;
    }
}
