package com.huitian.net.module

import java.util.Properties

import com.google.inject.{AbstractModule, Provides, Singleton}
import com.huitian.net.core.Adapter
import com.huitian.net.impl.EnAdapterImpl
import com.huitian.net.pipeline.{EnSource, GxSparkSession}
import com.huitian.net.{Hive2MysqlApp, MysqlConfiguration, SparkConfiguration}
import org.apache.spark.sql.{DataFrame, SparkSession}

object MainModule extends AbstractModule {

  /**
   * Wires the application graph:
   *  - [[SparkConfiguration]] as an eager singleton (fails fast at startup if misconfigured)
   *  - [[Hive2MysqlApp]] as the application entry point
   *  - [[Adapter]] bound to the [[EnAdapterImpl]] object (enterprise-standard data adapter)
   */
  override def configure(): Unit = {

    bind(classOf[SparkConfiguration]).asEagerSingleton() // Spark configuration

    bind(classOf[Hive2MysqlApp]) // application entry point
    bind(classOf[Adapter]).toInstance(EnAdapterImpl) // enterprise-standard data adapter

  }

  /**
   * Provides the shared Spark session wrapper.
   *
   * Hive support is enabled with dynamic partitioning in nonstrict mode, and
   * TOS object-storage credentials are taken from [[SparkConfiguration]].
   * `getOrCreate()` reuses an already-running session if one exists.
   *
   * @return a [[GxSparkSession]] yielding the application-wide [[SparkSession]]
   */
  @Provides
  @Singleton
  def GxSparkSession(): GxSparkSession[SparkSession] = {
    new GxSparkSession[SparkSession] {
      override def session(): SparkSession = {
        val sparkConf = new SparkConfiguration
        SparkSession.builder
          //          .master(sparkConf.sparkMaster)
          .enableHiveSupport() // fix: original chain called enableHiveSupport() twice
          .config("hive.exec.dynamic.partition", true)
          .config("hive.exec.dynamic.partition.mode", "nonstrict")
          .config("fs.tos.access.key", sparkConf.ak)
          .config("fs.tos.secret.key", sparkConf.sk)
          .getOrCreate()
      }
    }
  }


  /**
   * Provides the source-data reader: loads the `car_overview` table from MySQL
   * via JDBC into a [[DataFrame]].
   *
   * @param gxSparkSession the Guice-managed session wrapper. Fix: the original
   *                       body invoked the provider method `GxSparkSession`
   *                       directly, which bypassed the injector and therefore
   *                       the `@Singleton` scope, building a new anonymous
   *                       wrapper on every `read()`. Declaring it as a
   *                       parameter lets Guice supply the scoped instance.
   * @return the source-data reader
   */
  @Provides
  @Singleton
  def EnDataSource(gxSparkSession: GxSparkSession[SparkSession]): EnSource[DataFrame] = {
    new EnSource[DataFrame] {
      override def read(): DataFrame = {
        val conf: MysqlConfiguration = new MysqlConfiguration
        // JDBC connection properties
        val readConnProperties = new Properties()
        readConnProperties.put("driver", conf.mysqlJdbcDriver)
        readConnProperties.put("user", conf.dataSourceUser)
        readConnProperties.put("password", conf.dataSourcePassword)
        // Rows fetched per driver round trip; 3 is very small — presumably a
        // leftover debug value. NOTE(review): confirm the intended fetch size.
        readConnProperties.put("fetchsize", "3")
        gxSparkSession.session().read.jdbc(conf.mysqlJdbcUrl, "car_overview", readConnProperties)

      }

    }
  }


}
