package com.dukk.espark.core;

import com.dukk.espark.utils.ConfigKit;
import com.dukk.espark.udf.DemoUdf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.serializer.KryoSerializer;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.datasyslab.geospark.serde.GeoSparkKryoRegistrator;
import org.datasyslab.geosparksql.utils.GeoSparkSQLRegistrator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;


/**
 *
 * Core entry class for Spark initialization.
 *
 * @version 2020-08-05
 * @author shengshi_feiyang@yeah.net
 *
 */
public class SparkCtx implements Serializable {

    private static final long serialVersionUID = 2362757113367696284L;

    private static final Logger logger = LoggerFactory.getLogger(SparkCtx.class);

    // Shared SparkSession created in the constructor; released via close().
    private SparkSession sparkSession = null;

    // Java-friendly wrapper around the session's underlying SparkContext.
    private JavaSparkContext javaSparkContext = null;

    /**
     * Builds and configures the SparkSession: MongoDB input/output connection
     * defaults, Kryo serialization with the GeoSpark registrator, an engine
     * listener, custom UDF registration, and GeoSpark SQL function registration.
     *
     * @param configKit provider of the MongoDB URL and database settings
     * @param appName   Spark application name
     * @param isDebug   when {@code true}, forces master "local" (local debug mode);
     *                  otherwise the master is left to the cluster/submit environment
     */
    public SparkCtx(ConfigKit configKit, String appName, boolean isDebug){

        SparkSession.Builder builder = SparkSession.builder();
        if(isDebug){
            // Local mode for debugging; in cluster mode the master comes from spark-submit.
            builder.master("local");
        }

        this.sparkSession = builder
                            .appName(appName)
                            .config("spark.mongodb.input.uri", configKit.getEsparkMongodbUrl())
                            .config("spark.mongodb.input.database", configKit.getEsparkMongodbDatabase())
                            .config("spark.mongodb.input.collection", "spark_init_collection") // placeholder collection used only for the initial connection
                            .config("spark.mongodb.output.uri", configKit.getEsparkMongodbUrl())
                            .config("spark.mongodb.output.database", configKit.getEsparkMongodbDatabase())
                            .config("spark.mongodb.output.collection", "spark_init_collection") // placeholder collection used only for the initial connection
                            .config("spark.serializer",KryoSerializer.class.getName())
                            .config("spark.kryo.registrator", GeoSparkKryoRegistrator.class.getName())
                            .config("spark.extraListeners","com.dukk.espark.core.EngineListener")
                            .getOrCreate();

        // Register user-defined functions.
        registerUdf(this.sparkSession.sqlContext());
        // Enable GeoSpark SQL functions (ST_* etc.).
        GeoSparkSQLRegistrator.registerAll(this.sparkSession);
        //JdbcDialects.registerDialect(new OracleCustomizeDialect());

        this.javaSparkContext = new JavaSparkContext(this.sparkSession.sparkContext());

    }

    /**
     * Releases Spark resources. Both close() calls stop the same underlying
     * SparkContext; the second stop is a no-op since SparkContext.stop() is idempotent.
     */
    public void close(){
        if(null != sparkSession){
            sparkSession.close();
        }
        if(null != javaSparkContext){
            javaSparkContext.close();
        }
    }

    /** @return the configured SparkSession created by this context */
    public SparkSession getSparkSession() {
        return sparkSession;
    }

    /** @return the JavaSparkContext wrapping the session's SparkContext */
    public JavaSparkContext getJavaSparkContext() {
        return javaSparkContext;
    }

    /**
     * Registers the built-in custom UDFs on the given SQLContext.
     * Additional user-defined functions should be registered here as well.
     *
     * @param sqlContext the SQLContext to register UDFs on
     */
    private void registerUdf(SQLContext sqlContext){
        sqlContext.udf().register("easy_udf_demo", new DemoUdf(),DataTypes.LongType);
    }
}
