package com.hucais.sync.hive2es

import com.hucais.core.utils.{DefaultPropertiesUtil, SparkDefaultUtil}
import com.hucais.etl.common.dao.OdsHiveDao
import com.hucais.etl.job.OdsOpenBook
import org.elasticsearch.spark.sql.EsSparkSQL

/**
 * Batch job that reads the ODS open-book dataset from Hive and writes it to
 * Elasticsearch (index/resource resolved from the `ods.opendata` property).
 *
 * NOTE(review): object name has a typo ("Snyc" vs "Sync") but is kept as-is —
 * renaming would break external callers / job schedulers referencing this class.
 */
object SnycOpenBooks {
  def main(args: Array[String]): Unit = {
    // NOTE(review): `isLocal = true` looks like a debug leftover for a sync job —
    // confirm whether this should be driven by config/args before deploying.
    val sparkSession = SparkDefaultUtil.getSession(isLocal = true, this.getClass.getSimpleName, withEs = true, withHive = true)

    try {
      // Point HDFS at the configured default filesystem so Hive table paths resolve.
      sparkSession.sparkContext.hadoopConfiguration
        .set("fs.defaultFS", DefaultPropertiesUtil.get("fs.defaultFS"))

      val openBookDS = OdsHiveDao.getOdsOpenBook(sparkSession)
      import sparkSession.implicits._

      // Pure per-record projection into the OdsOpenBook case class; a plain `map`
      // is equivalent to the previous mapPartitions-wrapping-map and clearer.
      val resultDS = openBookDS.map(item =>
        OdsOpenBook(item.isbn, item.book_name, item.selling_price, item.discount_rate, item.author,
          item.category, item.publishing_house, item.month_sales, item.year_sales, item.total_sales,
          item.book_list, item.channel_type, item.sale_type, item.sale_time)
      )

      // Write the projected dataset to the ES resource named by `ods.opendata`.
      EsSparkSQL.saveToEs(resultDS, DefaultPropertiesUtil.get("ods.opendata"))
    } finally {
      // Previously the session was never stopped, leaking the SparkContext on
      // failure; always release cluster resources.
      sparkSession.stop()
    }
  }
}
