package com.qiandw.spark

import com.qiandw.common._
import com.qiandw.common.cassandra.CassandraConfig
import com.qiandw.config.{DemoMysqlConfig, SparkConfig}
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}
import org.springframework.beans.factory.InitializingBean
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component

@Component
class SparkContextFactoryImpl extends SparkContextFactory with InitializingBean {

  // Both handles are created exactly once, in afterPropertiesSet, after
  // Spring has populated the @Autowired config beans below.
  private var sc: SparkContext = _

  private var ss: SparkSession = _

  @Autowired
  var sparkCfg: SparkConfig = _

  @Autowired
  var demoCfg: DemoMysqlConfig = _

  @Autowired
  var cassandraConfig: CassandraConfig = _

  /** Splits the comma-separated `sparkCfg.jars` setting into individual jar
    * paths, or returns an empty array when the setting is null/blank.
    * NOTE(review): entries are not trimmed — a value like "a.jar, b.jar"
    * yields a path with a leading space; confirm upstream config is clean.
    */
  def getJarArray: Array[String] =
    if (sparkCfg.jars.isNullOrWhiteSpace) Array.empty[String]
    else sparkCfg.jars.split(",")

  /** The shared [[SparkContext]] created during bean initialization. */
  override def getSparkContext: SparkContext = sc

  /** The shared [[SparkSession]] created during bean initialization. */
  override def getSparkSession: SparkSession = ss

  /** Builds a [[SparkConf]] from the injected Cassandra, JDBC and Spark
    * config beans, then creates the singleton SparkSession/SparkContext.
    * Invoked by Spring once all `@Autowired` fields are set.
    */
  override def afterPropertiesSet(): Unit = {
    // SparkConf is a mutable builder whose setters return `this`, so a single
    // val suffices — no reassignment needed after the conditional setters.
    val sparkConf = new SparkConf()
      .set("spark.cassandra.connection.host", cassandraConfig.nodes)
      .set("spark.cassandra.auth.username", cassandraConfig.user)
      .set("spark.cassandra.auth.password", cassandraConfig.password)
      .set("spark.cassandra.connection.local_dc", cassandraConfig.datacenter)
      // JDBC settings are passed through under custom "spark.jdbc.*" keys;
      // presumably read back by jobs elsewhere in this project.
      .set("spark.jdbc.driver.class", demoCfg.driverClass)
      .set("spark.jdbc.connection.url", demoCfg.conStr)
      .set("spark.jdbc.auth.username", demoCfg.username)
      .set("spark.jdbc.auth.password", demoCfg.password)
      .setMaster(sparkCfg.master)
      .set("spark.testing.memory", "600000000")

    if (!sparkCfg.jars.isNullOrWhiteSpace) {
      sparkConf.setJars(getJarArray)
    }
    if (!sparkCfg.home.isNullOrWhiteSpace) {
      sparkConf.setSparkHome(sparkCfg.home)
    }
    // Fall back to a fixed app name when none is configured.
    sparkConf.setAppName(
      if (sparkCfg.appName.isNullOrWhiteSpace) "Test-Spark-App"
      else sparkCfg.appName
    )

    ss = SparkSession.builder().config(sparkConf).getOrCreate()
    sc = ss.sparkContext
  }

}
