package com.apex.spark.sink;

import com.alibaba.fastjson.JSONObject;
import com.apex.spark.SparkEnvironment;
import com.apex.spark.batch.SparkBatchSink;
import com.apex.spark.utils.ConfigKeyName;
import com.typesafe.config.Config;
import org.apache.spark.api.java.function.ForeachPartitionFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;

/**
 * Batch sink that writes a Spark {@code Dataset<Row>} to a JDBC target.
 *
 * <p>Two write strategies, selected by the configured driver class:
 * <ul>
 *   <li>ClickHouse / MySQL / Oracle — Spark's built-in {@code DataFrameWriter.jdbc}.</li>
 *   <li>Impala / Hive — a per-partition raw JDBC insert, because Spark's JDBC
 *       writer does not handle those drivers.</li>
 * </ul>
 */
public class JdbcBatchSink implements SparkBatchSink {
    private final Logger logger = LoggerFactory.getLogger(JdbcBatchSink.class);
    private Config config;
    private Properties properties;
    private String url = "";
    private String schemaTablename = "";

    /**
     * Reads writer configuration (driver, url, table, credentials, extra options)
     * into {@link #properties}, {@link #url} and {@link #schemaTablename}.
     *
     * @param plugin the Spark environment (unused here; connection setup is config-driven)
     */
    @Override
    public void prepare(SparkEnvironment plugin) {
        properties = new Properties();
        if (config.hasPath(ConfigKeyName.DRIVER_CLASS_WRITER)) {
            String driver = config.getString(ConfigKeyName.DRIVER_CLASS_WRITER);
            switch (driver) {
                case "com.cloudera.impala.jdbc41.Driver":
                case "com.cloudera.hive.jdbc41.HS2Driver":
                    // Impala/Hive: no user/password; rows are written via raw JDBC
                    // in outputBatch rather than through Spark's JDBC writer.
                    url = config.getString(ConfigKeyName.JDBC_URL_WRITER);
                    schemaTablename = config.getString(ConfigKeyName.JDBC_TABLE_WRITER);
                    properties.put("driver", driver);
                    break;
                case "com.mysql.jdbc.Driver":
                case "oracle.jdbc.driver.OracleDriver":
                case "ru.yandex.clickhouse.ClickHouseDriver":
                    properties.put("driver", driver);
                    properties.put("user", config.getString(ConfigKeyName.JDBC_USER_NAME_WRITER));
                    properties.put("password", config.getString(ConfigKeyName.JDBC_PASSWORD_WRITER));
                    url = config.getString(ConfigKeyName.JDBC_URL_WRITER);
                    schemaTablename = config.getString(ConfigKeyName.JDBC_TABLE_WRITER);
                    break;
                default:
                    logger.error("Unsupported JDBC writer driver: {}", driver);
                    // Was System.exit(0): exit code 0 reports success on a fatal
                    // misconfiguration. Use a non-zero code so schedulers see the failure.
                    System.exit(1);
            }
            // Extra JDBC options. The config value's toString looks like "{k=v, ...}";
            // replacing '=' with ':' turns it into JSON-ish text that fastjson can parse
            // leniently into a map. Kept as-is: fastjson-specific, behavior verified only
            // by existing usage — TODO confirm with a config containing JDBC_OPTIONS_WRITER.
            if (config.hasPath(ConfigKeyName.JDBC_OPTIONS_WRITER)) {
                String options = config.getAnyRef(ConfigKeyName.JDBC_OPTIONS_WRITER).toString();
                String jsonOptions = JSONObject.toJSONString(options.replaceAll("=", ":"));
                String jsonString = JSONObject.parse(jsonOptions).toString();
                for (Map.Entry<String, Object> entry :
                        JSONObject.parseObject(jsonString).entrySet()) {
                    properties.put(entry.getKey(), String.valueOf(entry.getValue()));
                }
            }
        }
    }

    @Override
    public Config getConfig() {
        return config;
    }

    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    /**
     * Writes the dataset to the configured JDBC target.
     *
     * <p>Bug fix: the original switch had no {@code break} statements, so the
     * ClickHouse case fell through into the MySQL/Oracle Overwrite write and then
     * into the Impala/Hive raw-JDBC path (and MySQL/Oracle fell into Impala/Hive),
     * writing the data multiple times.
     *
     * @param env     the Spark environment (unused; writes go through the dataset)
     * @param dataSet the rows to persist
     */
    @Override
    public void outputBatch(SparkEnvironment env, Dataset<Row> dataSet) {
        switch (properties.getProperty("driver")) {
            case "ru.yandex.clickhouse.ClickHouseDriver":
                // Spark cannot auto-create ClickHouse tables, so only Append is possible.
                dataSet.write().mode(SaveMode.Append).jdbc(url, schemaTablename, properties);
                break;
            case "com.mysql.jdbc.Driver":
            case "oracle.jdbc.driver.OracleDriver":
                // Can only write into a temporary/staging table.
                dataSet.write().mode(SaveMode.Overwrite).jdbc(url, schemaTablename, properties);
                break;
            case "com.cloudera.impala.jdbc41.Driver":
            case "com.cloudera.hive.jdbc41.HS2Driver":
                dataSet.foreachPartition(new ForeachPartitionFunction<Row>() {
                    @Override
                    public void call(Iterator<Row> iterator) throws Exception {
                        Class.forName(properties.getProperty("driver"));
                        // try-with-resources: the original leaked the Connection
                        // (never closed, even for empty partitions).
                        try (Connection connection = DriverManager.getConnection(url)) {
                            while (iterator.hasNext()) {
                                Row row = iterator.next();
                                StructField[] structFields = row.schema().fields();
                                // One '?' per column. The original derived the count from
                                // row.size() via a fall-behind counter and produced "()"
                                // for single-column rows.
                                StringBuilder placeholders = new StringBuilder("(");
                                for (int i = 0; i < structFields.length; i++) {
                                    if (i > 0) {
                                        placeholders.append(",");
                                    }
                                    placeholders.append("?");
                                }
                                placeholders.append(")");
                                String sql = "insert into " + schemaTablename + " values " + placeholders;
                                try (PreparedStatement prepareStatement = connection.prepareStatement(sql)) {
                                    for (int i = 0; i < structFields.length; i++) {
                                        bindValue(prepareStatement, row, structFields[i], i + 1);
                                    }
                                    prepareStatement.executeUpdate();
                                }
                            }
                        }
                    }
                });
                break;
            default:
                logger.error("no support");
        }
    }

    /**
     * Binds a single column value onto the prepared statement, mapping the Spark
     * catalog type to the matching JDBC setter and substituting the original
     * code's null defaults (0 for numerics, the literal string "null" for text).
     *
     * @param prepareStatement statement being populated
     * @param row              current row
     * @param field            schema field for the column being bound
     * @param index            1-based JDBC parameter index
     * @throws Exception on JDBC binding errors
     */
    private static void bindValue(PreparedStatement prepareStatement, Row row,
                                  StructField field, int index) throws Exception {
        String catalog = field.dataType().catalogString().toLowerCase();
        String type;
        if (catalog.contains("decimal")) {
            type = "decimal";
        } else if (catalog.contains("varchar")) {
            type = "string";
        } else {
            type = catalog;
        }
        Object value = row.getAs(field.name());
        switch (type) {
            case "string":
            case "date":
                // NOTE(review): writes the literal string "null" for absent values
                // (preserved from the original) — confirm setNull was not intended.
                if (value == null) {
                    prepareStatement.setString(index, "null");
                } else {
                    prepareStatement.setString(index, (String) value);
                }
                break;
            case "decimal":
                if (value == null) {
                    prepareStatement.setBigDecimal(index, BigDecimal.ZERO);
                } else {
                    prepareStatement.setBigDecimal(index, (BigDecimal) value);
                }
                break;
            case "int":
            case "integer":
                if (value == null) {
                    prepareStatement.setInt(index, 0);
                } else {
                    prepareStatement.setInt(index, new BigDecimal(value.toString()).intValue());
                }
                break;
            case "long":
            case "bigint":
                if (value == null) {
                    prepareStatement.setLong(index, 0);
                } else {
                    prepareStatement.setLong(index, (Long) value);
                }
                break;
            case "float":
                if (value == null) {
                    prepareStatement.setFloat(index, 0);
                } else {
                    prepareStatement.setFloat(index, (Float) value);
                }
                break;
            case "double":
                if (value == null) {
                    prepareStatement.setDouble(index, 0);
                } else {
                    prepareStatement.setDouble(index, (Double) value);
                }
                break;
            case "byte":
                prepareStatement.setByte(index, (Byte) value);
                break;
            case "boolean":
                prepareStatement.setBoolean(index, (Boolean) value);
                break;
            default:
                // Preserved original behavior: unknown types are skipped, which leaves
                // the parameter unbound and fails at executeUpdate — surface early if seen.
                break;
        }
    }
}
