package com.bigdata.wsr.createTable;


import com.alibaba.druid.pool.DruidDataSource;
import com.bigdata.wsr.createTable.bean.FieldInfo;
import com.bigdata.wsr.createTable.config.DbConfig;
import com.bigdata.wsr.createTable.sink.HiveDdlUtil;
import com.bigdata.wsr.createTable.source.SourceTableUtil;
import lombok.extern.slf4j.Slf4j;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Generates Hive CREATE TABLE statements from existing MySQL table definitions.
 *
 * <p>For each configured table it (1) reads the MySQL DDL, (2) parses it into field
 * metadata, (3) maps MySQL column types to Hive equivalents, and (4) prints the
 * assembled Hive DDL to stdout.
 *
 * @author rui.wang
 * @date 2022/11/14
 */
@Slf4j
public class AutoCreateTableOnHive {
    public static void main(String[] args) {
        // Tables to generate Hive DDL for; uncomment candidates as needed.
        List<String> tableList = new ArrayList<>();
//        tableList.add("clms_fee_package");
//        tableList.add("clms_fee_pay_apply_bill");
//        tableList.add("clms_fee_pay_apply_bill_fee");
//        tableList.add("clms_product");
        tableList.add("ofs_make_loan");
//        tableList.add("ofs_receipt_busi_detail");

        // DruidDataSource is Closeable: close the pool when done so pooled
        // connections are released and the JVM can exit cleanly.
        try (DruidDataSource ds = DbConfig.getMysql137AccountDs()) {
            for (String tableName : tableList) {
                System.out.println(buildHiveDdl(tableName, ds));
            }
        }
    }

    /**
     * Builds the Hive CREATE TABLE statement for a single MySQL table.
     *
     * @param tableName source table name in MySQL
     * @param ds        MySQL data source to read the table definition from
     * @return the assembled Hive DDL string
     */
    private static String buildHiveDdl(String tableName, DruidDataSource ds) {
        // 1. Fetch the MySQL DDL and the table's column count.
        String ddl = SourceTableUtil.getTableDdl(tableName, ds);
        int columnCount = SourceTableUtil.getColumnCount(tableName, ds);

        // 2. Parse the DDL into per-field metadata and key-field mappings
        //    (both collections are populated by the call as out-parameters).
        List<FieldInfo> fieldInfoList = new ArrayList<>();
        Map<String, String> keyFieldMap = new HashMap<>();
        SourceTableUtil.ddlParseToField(ddl, columnCount, fieldInfoList, keyFieldMap);

        // 3. Map MySQL column types to their Hive equivalents.
        List<FieldInfo> hiveFieldInfoList = new ArrayList<>();
        HiveDdlUtil.hiveFieldTypeAdaptor(fieldInfoList, hiveFieldInfoList);

        // 4. Assemble the final Hive CREATE TABLE statement.
        return HiveDdlUtil.getHiveDdl(tableName, hiveFieldInfoList, keyFieldMap);
    }
}
