package com.patsnap.data.npd.dw.etl.utils;

import com.patsnap.data.npd.dw.repository.DBOperator;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.lang.StringUtils;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.spark.sql.*;
import org.apache.spark.util.LongAccumulator;
import scala.collection.JavaConverters;
import scala.collection.Seq;

import java.util.*;


@Slf4j
public class SparkUtils {
    private static final String DATASOURCE_NAME_NPD_ODS = "npd_ods";
    private static final String DATASOURCE_NAME_NPD_DWD = "npd_dwd";
    private static final String DATASOURCE_NAME_NPD_ADS = "npd_ads";
    private static final String DATASOURCE_NAME_SA_PRODUCTION = "npd_sa";

    // COS bucket root shared by the warehouse-dir guard and the Delta table path builder.
    // Kept as one constant so the three former in-line copies cannot drift apart.
    private static final String COS_BUCKET = "cosn://data-datalake-pd-io-dataprod-ash-1251949819";

    /** Static utility holder — not meant to be instantiated. */
    private SparkUtils() {
    }

    /**
     * Builds a {@link DBOperator} wired with the per-datasource {@link SqlSessionFactory}
     * instances found in the given map. Datasources absent from the map are passed as
     * {@code null} to the operator.
     *
     * @param sqlSessionFactoryMap factories keyed by datasource name; may be {@code null}
     * @return the operator, or {@code null} when the map itself is {@code null}
     */
    public static DBOperator initDBOperator(Map<String, SqlSessionFactory> sqlSessionFactoryMap) {
        if (sqlSessionFactoryMap == null) {
            return null;
        }
        // Map.get already yields null for absent keys; the former containsKey
        // pre-checks were redundant double lookups.
        return new DBOperator(
                sqlSessionFactoryMap.get(DATASOURCE_NAME_NPD_ODS),
                sqlSessionFactoryMap.get(DATASOURCE_NAME_NPD_DWD),
                sqlSessionFactoryMap.get(DATASOURCE_NAME_NPD_ADS),
                sqlSessionFactoryMap.get(DATASOURCE_NAME_SA_PRODUCTION));
    }

    /**
     * Configures the given builder from command-line options and creates the
     * {@link SparkSession}.
     *
     * <p>Recognized options: {@code job_name}, {@code master} (a value containing
     * "local" switches to local mode), {@code enable_sql} (Hive support; requires the
     * metastore/JDO options and a non-bucket-root {@code spark.sql.warehouse.dir}),
     * and {@code enable_tispark} (TiSpark catalog via {@code pd.addresses}).
     *
     * @param cmd          parsed command line carrying the options above
     * @param sparkBuilder builder to configure; mutated in place
     * @return the created (or pre-existing) session
     * @throws RuntimeException when Hive mode is enabled but the warehouse dir is
     *                          blank or points at the bare COS bucket root
     */
    public static SparkSession initSpark(final CommandLine cmd, SparkSession.Builder sparkBuilder) {
        final String appName = cmd.getOptionValue("job_name", "spark app");
        final String master = cmd.getOptionValue("master");
        if (StringUtils.contains(master, "local")) {
            sparkBuilder.master(master);
            sparkBuilder.config("spark.sql.warehouse.dir", "file:///~/spark-warehouse");
        }
        sparkBuilder.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                .config("spark.sql.session.timeZone", "Asia/Shanghai");

        final String enableSql = cmd.getOptionValue("enable_sql", "false");
        final String enableTispark = cmd.getOptionValue("enable_tispark", "false");
        if ("true".equalsIgnoreCase(enableSql)) {
            final String hiveExternalDir = cmd.getOptionValue("spark.sql.warehouse.dir");
            if (StringUtils.isBlank(hiveExternalDir)) {
                throw new RuntimeException("CONFIGURATION: spark.sql.warehouse.dir must be set on Apollo!");
            }
            // The bare bucket root is rejected: warehouse data must live under a
            // dedicated prefix, not at the top of the shared bucket.
            if (hiveExternalDir.startsWith(COS_BUCKET)) {
                throw new RuntimeException("CONFIGURATION: warehouse dir can not be set to '" + COS_BUCKET + "'");
            }

            sparkBuilder.config("spark.sql.hive.metastore.uris", cmd.getOptionValue("spark.sql.hive.metastore.uris"))
                    .config("spark.sql.warehouse.dir", hiveExternalDir)
                    .config("spark.sql.catalogImplementation", "hive")
                    .config("spark.hadoop.javax.jdo.option.ConnectionURL", cmd.getOptionValue("spark.hadoop.javax.jdo.option.ConnectionURL"))
                    .config("spark.hadoop.javax.jdo.option.ConnectionDriverName", cmd.getOptionValue("spark.hadoop.javax.jdo.option.ConnectionDriverName"))
                    .config("spark.hadoop.javax.jdo.option.ConnectionUserName", cmd.getOptionValue("spark.hadoop.javax.jdo.option.ConnectionUserName"))
                    .config("spark.hadoop.javax.jdo.option.ConnectionPassword", cmd.getOptionValue("spark.hadoop.javax.jdo.option.ConnectionPassword"))
                    .config("spark.hadoop.hive.exec.dynamic.partition", "true") // enable dynamic partitioning (Hive default is false)
                    .config("spark.hadoop.hive.exec.dynamic.partition.mode", "nonstrict") // allow all partitions to be dynamic; otherwise a static partition is required
                    .enableHiveSupport();
        } else if ("true".equalsIgnoreCase(enableTispark)) {
            // NOTE: was case-sensitive equals(), unlike enable_sql above; aligned for consistency.
            log.warn("enable tispark");
            final String pdAddresses = cmd.getOptionValue("pd.addresses");
            sparkBuilder.config("spark.sql.extensions", "org.apache.spark.sql.TiExtensions");
            sparkBuilder.config("spark.sql.catalog.tidb_catalog", "org.apache.spark.sql.catalyst.catalog.TiCatalog");
            sparkBuilder.config("spark.tispark.pd.addresses", pdAddresses);
            sparkBuilder.config("spark.sql.catalog.tidb_catalog.pd.addresses", pdAddresses);
        }
        sparkBuilder.appName(appName);
        return sparkBuilder.getOrCreate();
    }

    /**
     * Convenience overload of {@link #initSpark(CommandLine, SparkSession.Builder)}
     * that starts from a fresh builder.
     */
    public static SparkSession initSpark(final CommandLine cmd) {
        return initSpark(cmd, SparkSession.builder());
    }

    /**
     * Wraps the given strings in a Scala {@link Seq} (a mutable Buffer view, which
     * satisfies the {@code Seq} interface) for Spark APIs that take Scala collections.
     */
    public static Seq<String> getSeq(final String... cols) {
        return JavaConverters.asScalaBufferConverter(Arrays.asList(cols)).asScala();
    }

    /**
     * Resolves the named columns against the given dataset.
     *
     * @param dataset  dataset to resolve against
     * @param colNames column names; each must exist in the dataset's schema
     * @return resolved columns, in the order of {@code colNames}
     */
    public static Column[] cols(final Dataset<?> dataset, final String... colNames) {
        final Column[] columns = new Column[colNames.length];
        for (int i = 0; i < colNames.length; i++) {
            columns[i] = dataset.col(colNames[i]);
        }
        return columns;
    }

    /** Varargs-to-array helper so call sites can write {@code cols(a, b, c)}. */
    public static Column[] cols(final Column... cols) {
        return cols;
    }

    /**
     * Concatenates several column arrays into one, preserving order.
     */
    public static Column[] combineCols(final Column[]... cols) {
        final List<Column> list = new ArrayList<>();
        for (final Column[] columns : cols) {
            list.addAll(Arrays.asList(columns));
        }
        return list.toArray(new Column[0]);
    }

    /**
     * Returns every column of the dataset except the named exclusions, in schema order.
     *
     * @param dataset    dataset whose columns are enumerated
     * @param exceptCols column names to leave out
     * @return the remaining columns
     */
    public static Column[] allColumns(final Dataset<?> dataset, final String... exceptCols) {
        final Set<String> excluded = new HashSet<>(Arrays.asList(exceptCols));
        final List<Column> columns = new ArrayList<>();
        for (final String name : dataset.columns()) {
            if (!excluded.contains(name)) {
                columns.add(dataset.col(name));
            }
        }
        return columns.toArray(new Column[0]);
    }

    /**
     * Adds a struct column named {@code structName} to {@code parent}, built from the
     * named columns resolved against {@code colFromWhere}.
     *
     * @param parent      dataset receiving the new struct column
     * @param structName  name of the struct column to add
     * @param colFromWhere dataset the member columns are resolved against
     * @param colNames    member column names, in struct-field order
     * @return a new dataset with the struct column appended
     */
    public static Dataset<Row> columnsToSubSchema(final Dataset<Row> parent, final String structName,
                                                  final Dataset<Row> colFromWhere, final String... colNames) {
        final Column[] cols = new Column[colNames.length];
        for (int i = 0; i < colNames.length; i++) {
            cols[i] = colFromWhere.col(colNames[i]);
        }
        return parent.withColumn(structName, functions.struct(cols));
    }

    /**
     * Write options selecting LZO compression. Returned as a mutable map so callers
     * can add further options before passing it to a writer.
     */
    public static Map<String, String> lzoCompressOption() {
        final Map<String, String> options = new HashMap<>();
        options.put("compression", "com.hadoop.compression.lzo.LzopCodec");
        return options;
    }

    /**
     * Logs the name and current value of each accumulator.
     */
    public static void printResult(final LongAccumulator... accumulators) {
        for (final LongAccumulator accumulator : accumulators) {
            // Parameterized logging avoids eager string concatenation.
            log.info("{} is : {}", accumulator.name(), accumulator.value());
        }
    }

    /**
     * Builds the COS path of a Delta Lake table:
     * {@code <bucket>/deltalake/<database>.db/<table>/}.
     *
     * @param table    table name
     * @param database database name
     * @return absolute {@code cosn://} path, with trailing slash
     */
    public static String getDeltaTablePath(String table, String database) {
        return COS_BUCKET + "/deltalake/" + database + ".db/" + table + "/";
    }
}
