package org.apache.hadoop.hive.ql.hooks;

import com.sql.blood.common.enums.DataSourceEnum;
import com.sql.blood.common.model.TabLine;
import org.apache.commons.lang3.StringUtils;

import java.util.StringJoiner;

/**
 * Package-private helpers for normalizing Hive table references collected by the hook.
 *
 * <p>Table lists are "&amp;"-separated; each entry is either {@code table} or
 * {@code db.table}. Entries without an explicit database are qualified with
 * {@value #DEFAULT_DB}.
 */
class HiveUtils {

    private static final String DEFAULT_DB = "default";
    private static final String SPLIT_AND = "&";
    private static final String SPLIT_DOT = ".";

    /** Utility class — not instantiable. */
    private HiveUtils() {}

    /**
     * Qualifies every entry of an "&amp;"-separated table list with a database name,
     * inserting {@value #DEFAULT_DB} where none is present.
     *
     * <p>Example: {@code "t1&db2.t2"} becomes {@code "default.t1&db2.t2"}.
     *
     * @param nowTable "&amp;"-separated list of {@code table} / {@code db.table} entries;
     *                 blank or {@code null} input is returned unchanged
     * @return the fully qualified list, joined with "&amp;"
     * @throws IllegalArgumentException if any entry contains more than one dot
     *         (e.g. {@code "a.b.c"}), which cannot be split into db/table
     */
    static String fillDB(String nowTable) {
        if (StringUtils.isBlank(nowTable)) {
            return nowTable;
        }
        // StringJoiner avoids the append-then-truncate trailing-delimiter hack.
        StringJoiner joiner = new StringJoiner(SPLIT_AND);
        for (String entry : nowTable.split(SPLIT_AND)) {
            // SPLIT_DOT is a regex metacharacter, hence the escape.
            String[] parts = entry.split("\\" + SPLIT_DOT);
            if (parts.length > 2) {
                throw new IllegalArgumentException("cannot parse table reference: " + nowTable);
            }
            String db = parts.length == 2 ? parts[0] : DEFAULT_DB;
            String table = parts.length == 2 ? parts[1] : parts[0];
            joiner.add(db + SPLIT_DOT + table);
        }
        return joiner.toString();
    }

    /**
     * Splits a {@code db.table} reference into a Hive {@link TabLine}.
     *
     * @param url      cluster/connection URL recorded on the resulting line
     * @param bd_table table reference expected in {@code db.table} form
     * @return a populated {@link TabLine}, or {@code null} when the input is blank
     *         or does not yield both a database and a table part
     */
    static TabLine splitDbTable(String url, String bd_table) {
        if (bd_table == null || bd_table.trim().isEmpty()) {
            return null;
        }
        // Split once; the previous code split twice and indexed [1] unguarded.
        String[] parts = bd_table.split("\\" + SPLIT_DOT);
        // Guard: inputs like "db." or "." contain a dot but split() drops trailing
        // empty tokens, so parts[1] would throw ArrayIndexOutOfBoundsException.
        if (parts.length < 2) {
            return null;
        }
        TabLine tabLine = new TabLine();
        tabLine.setUrl(url);
        tabLine.setDatasource(DataSourceEnum.HIVE.getSourceType());
        tabLine.setDatabase(parts[0]);
        tabLine.setTable(parts[1]);
        return tabLine;
    }
}
