package com.dmall.scf.utils

import com.dmall.scf.Profile
import com.dmall.scf.dto.{Env, MySqlConfig}
import org.apache.spark.sql.{DataFrame, Encoder, SparkSession}

import scala.collection.JavaConversions._

/**
 * @description Spark utility helpers for the scf project
 * @author wangxuexing
 * @date 2019/12/23
 */
object SparkUtils {
  // NOTE(review): JDBC endpoints and credentials are hard-coded placeholders;
  // consider loading them from external configuration (spark-submit conf,
  // environment variables, or a secrets store) instead of source code.
  // Development environment
  val DEV_URL = "jdbc:mysql://IP:PORT/dmall_scf?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&failOverReadOnly=false&useSSL=false"
  val DEV_USER = "user"
  val DEV_PASSWORD = "password"
  // Production-test environment
  val PROD_TEST_URL = "jdbc:mysql://IP:PORT/dmall_scf_test?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&failOverReadOnly=false&zeroDateTimeBehavior=convertToNull&useSSL=false"
  val PROD_TEST_USER = "user"
  val PROD_TEST_PASSWORD = "password"
  // Production environment
  val PROD_URL = "jdbc:mysql://IP:PORT/dmall_scf?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&failOverReadOnly=false&useSSL=false"
  val PROD_USER = "user"
  val PROD_PASSWORD = "password"

  /** Currently active profile; delegates to [[Profile.currentEvn]]. */
  def env = Profile.currentEvn

  /**
   * Resolves the environment-specific settings: the MySQL connection config
   * plus the matching SparkSession.
   *
   * @return the [[Env]] for the active profile
   * @throws IllegalStateException if the active profile is not one of
   *                               DEV / PROD / PROD_TEST
   */
  def getEnv: Env = {
    env match {
      case Profile.DEV =>
        Env(MySqlConfig(DEV_URL, DEV_USER, DEV_PASSWORD), getDevSparkSession)
      case Profile.PROD =>
        Env(MySqlConfig(PROD_URL, PROD_USER, PROD_PASSWORD), getProdSparkSession)
      case Profile.PROD_TEST =>
        // prod-test shares the production SparkSession but points at the test database
        Env(MySqlConfig(PROD_TEST_URL, PROD_TEST_USER, PROD_TEST_PASSWORD), getProdSparkSession)
      case _ =>
        // IllegalStateException is a RuntimeException (subtype of Exception),
        // so callers catching Exception are unaffected by this narrowing.
        throw new IllegalStateException("无法获取环境")
    }
  }

  /**
   * Builds the production SparkSession. Master/deploy settings are expected to
   * come from spark-submit, so none are set here.
   *
   * @return a Hive-enabled [[SparkSession]]
   */
  def getProdSparkSession: SparkSession = {
    SparkSession
      .builder()
      .appName("scf")
      .enableHiveSupport() // enable Hive metastore / HiveQL support
      .getOrCreate()
  }

  /**
   * Builds a local SparkSession for development, using all local cores.
   *
   * @return a Hive-enabled local-mode [[SparkSession]]
   */
  def getDevSparkSession: SparkSession = {
    SparkSession
      .builder()
      .master("local[*]")
      .appName("local-1576939514234")
      // if unset, defaults to <working dir>\spark-warehouse
      .config("spark.sql.warehouse.dir", "C:\\data\\spark-ware")
      .enableHiveSupport() // enable Hive metastore / HiveQL support
      .getOrCreate()
  }

  /**
   * Converts a DataFrame to a list of case-class instances by renaming the
   * columns (positionally) to the declared field names of `clazz` and decoding
   * via the implicit [[Encoder]].
   *
   * NOTE(review): assumes `getDeclaredFields` yields fields in declaration
   * order matching the DataFrame's column order — the JVM does not strictly
   * guarantee this ordering; verify on the target JVM.
   * Collects ALL rows to the driver; only use on DataFrames known to be small.
   *
   * @param df    source DataFrame; column order must match T's field order
   * @param clazz runtime class of T, used to derive target column names
   * @tparam T target case class with an implicit Encoder in scope
   * @return all rows as instances of T
   */
  def dataFrame2Bean[T: Encoder](df: DataFrame, clazz: Class[T]): List[T] = {
    val fieldNames = clazz.getDeclaredFields.map(_.getName).toList
    // collect() returns Array[T]; .toList is plain Scala and removes the
    // reliance on the deprecated JavaConversions implicit that
    // collectAsList().toList required.
    df.toDF(fieldNames: _*).as[T].collect().toList
  }
}
