package com.apex.spark.source;

import com.alibaba.fastjson.JSONObject;
import com.apex.spark.SparkEnvironment;
import com.apex.spark.batch.SparkBatchSource;
import com.apex.spark.utils.ConfigKeyName;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.jdbc.JdbcDialect;
import org.apache.spark.sql.jdbc.JdbcDialects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;
import java.util.Properties;

public class JdbcBatchSource implements SparkBatchSource {

    private static final Logger LOGGER = LoggerFactory.getLogger(JdbcBatchSource.class);

    private static final String IMPALA_DRIVER = "com.cloudera.impala.jdbc41.Driver";
    private static final String HIVE_DRIVER = "com.cloudera.hive.jdbc41.HS2Driver";
    private static final String MYSQL_DRIVER = "com.mysql.jdbc.Driver";
    private static final String ORACLE_DRIVER = "oracle.jdbc.driver.OracleDriver";

    private Config config;
    private Properties properties;
    private String url = "";
    private String schemaTablename = "";
    private String query = "";

    /**
     * Reads the configured JDBC table (or explicit query) into a {@link Dataset}
     * and registers it as a temp view named after the bare table name.
     *
     * @param environment supplies the active {@link SparkSession}
     * @return the loaded dataset; its schema is printed as a side effect
     */
    @Override
    public Dataset<Row> getData(SparkEnvironment environment) {
        // Work around Spark's default identifier quoting, which produces
        // column-type conversion errors when querying Impala/Hive over JDBC.
        JdbcDialects.registerDialect(new JdbcDialect() {
            @Override
            public boolean canHandle(String url) {
                return url.startsWith("jdbc:impala") ||
                        url.contains("impala") ||
                        url.startsWith("jdbc:hive2") ||
                        url.contains("hive2");
            }

            @Override
            public String quoteIdentifier(String colName) {
                // Impala/Hive identifiers must be left unquoted.
                return colName;
            }
        });

        SparkSession spark = environment.getSparkSession();
        String driver = properties.getProperty("driver");

        final Dataset<Row> dataset;
        if (StringUtils.isBlank(query)) {
            // No explicit query configured: read the whole table.
            dataset = spark.read().jdbc(url, schemaTablename, properties);
        } else if (IMPALA_DRIVER.equals(driver) || HIVE_DRIVER.equals(driver)) {
            // Impala/Hive carry authentication in the JDBC URL, so no
            // user/password options are passed here.
            dataset = spark.read()
                    .format("jdbc")
                    .option("driver", driver)
                    .option("url", url)
                    .option("query", query)
                    .load();
        } else {
            dataset = spark.read()
                    .format("jdbc")
                    .option("driver", driver)
                    .option("url", url)
                    .option("query", query)
                    .option("user", properties.getProperty("user"))
                    .option("password", properties.getProperty("password"))
                    .load();
        }
        dataset.printSchema();

        // By default, auto-register a temp view under the original table name,
        // stripping the schema prefix when one is present ("schema.table").
        if (!StringUtils.isBlank(schemaTablename)) {
            String[] parts = schemaTablename.split("\\.");
            dataset.createOrReplaceTempView(parts.length > 1 ? parts[1] : parts[0]);
        }
        return dataset;
    }

    /**
     * Builds the JDBC connection properties from the reader configuration.
     * Impala/Hive drivers authenticate via the URL; MySQL/Oracle additionally
     * require user/password entries.
     *
     * @param env the Spark environment (unused here)
     * @throws IllegalArgumentException if an unsupported driver class is configured
     */
    @Override
    public void prepare(SparkEnvironment env) {
        properties = new Properties();
        if (!config.hasPath(ConfigKeyName.DRIVER_CLASS_READER)) {
            return;
        }
        String driver = config.getString(ConfigKeyName.DRIVER_CLASS_READER);
        switch (driver) {
            case IMPALA_DRIVER:
            case HIVE_DRIVER:
                url = config.getString(ConfigKeyName.JDBC_URL_READER);
                properties.put("driver", driver);
                break;
            case MYSQL_DRIVER:
            case ORACLE_DRIVER:
                properties.put("driver", driver);
                properties.put("user", config.getString(ConfigKeyName.JDBC_USER_NAME_READER));
                properties.put("password", config.getString(ConfigKeyName.JDBC_PASSWORD_READER));
                url = config.getString(ConfigKeyName.JDBC_URL_READER);
                break;
            default:
                // Fail loudly instead of System.exit(0), which reported success
                // (exit code 0) on a fatal configuration error.
                LOGGER.error("Unsupported JDBC driver class: {}", driver);
                throw new IllegalArgumentException("Unsupported JDBC driver class: " + driver);
        }
        if (config.hasPath(ConfigKeyName.JDBC_TABLE_READER)) {
            schemaTablename = config.getString(ConfigKeyName.JDBC_TABLE_READER);
        }
        // Extra connection options: copy the configured key/value map straight
        // into the JDBC properties. Reading the config object directly replaces
        // the old Map.toString()/replaceAll/fastjson round-trip, which broke for
        // values containing '=' or ','.
        if (config.hasPath(ConfigKeyName.JDBC_OPTIONS_READER)) {
            for (Map.Entry<String, Object> entry :
                    config.getObject(ConfigKeyName.JDBC_OPTIONS_READER).unwrapped().entrySet()) {
                properties.put(entry.getKey(), String.valueOf(entry.getValue()));
            }
        }
        if (config.hasPath(ConfigKeyName.JDBC_OPTIONS_QUERY)) {
            query = config.getString(ConfigKeyName.JDBC_OPTIONS_QUERY);
        }
    }

    @Override
    public Config getConfig() {
        return config;
    }

    @Override
    public void setConfig(Config config) {
        this.config = config;
    }
}
