package com.hucais.sync.es2hive.controller

import com.hucais.core.utils.DefaultPropertiesUtil
import com.hucais.sync.es2hive.service.SnycOpenBooksService
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Migrates "open books" data from Elasticsearch into Hive.
 *
 * Batch entry point: builds a [[SparkConf]] wired to the Elasticsearch
 * cluster (via the elasticsearch-hadoop connector settings), points the
 * Hadoop configuration at the target HDFS, and delegates the actual
 * sync work to [[SnycOpenBooksService]].
 */
object SnycOpenBooksController {

  /**
   * Job entry point.
   *
   * @param args command-line arguments (currently unused)
   */
  def main(args: Array[String]): Unit = {
    // Run Hadoop/HDFS operations as "root" regardless of the OS user
    // submitting the job.
    System.setProperty("HADOOP_USER_NAME", "root")

    val sparkConf = new SparkConf()
      .setAppName("SnycOpenBooksController")
//      .setMaster("local[*]")  // uncomment for local debugging only
      // Kryo is faster/more compact than Java serialization for shuffles.
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.driver.allowMultipleContexts", "true")
      // Fail fast if the expected ES index is missing instead of
      // silently creating an empty one.
      .set("es.index.auto.create", "false")
      // ES connection endpoints come from external properties so the
      // same jar runs against any environment.
      .set("es.nodes", DefaultPropertiesUtil.get("es.nodes"))
      .set("es.port", DefaultPropertiesUtil.get("es.port"))

    val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    try {
      // Note: this is a plain SparkContext (not a StreamingContext).
      val sc = sparkSession.sparkContext

      // Target HDFS for the Hive tables, also environment-driven.
      sc.hadoopConfiguration.set("fs.defaultFS", DefaultPropertiesUtil.get("fs.defaultFS"))
      SnycOpenBooksService.action(sc, sparkSession)
    } finally {
      // Always release cluster resources, even if the sync fails;
      // otherwise the application (executors, YARN app) leaks.
      sparkSession.stop()
    }
  }
}
