package cn.tecnova.Synchronous

import cn.tecnova.bean.AppmIndustryType
import cn.tecnova.utils.ConfigHandler
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark.rdd.EsSpark

/**
  * Synchronizes the `industry_type` JDBC table into the Elasticsearch
  * index `appm_industry_type`, using each row's `id` as the document id.
  **/
object IndustryType2Es {

  // Silence Spark's internal logging; only errors from the "org" loggers surface.
  Logger.getLogger("org").setLevel(Level.ERROR)

  def main(args: Array[String]): Unit = {

    // Spark + elasticsearch-hadoop connector configuration.
    // ES endpoints come from the project-level ConfigHandler; wan.only is
    // required when the driver cannot reach the ES data nodes directly.
    val sparkConf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      // .setMaster("local[*]")  // uncomment for local debugging
      .set("es.index.auto.create", "true")
      .set("es.nodes", ConfigHandler.esNodes)
      .set("es.port", ConfigHandler.esPort)
      .set("es.nodes.wan.only", "true")

    val sparkContext = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sparkContext)

    // Encoders for mapping rows into the AppmIndustryType case class.
    import sqlContext.implicits._

    // Load the source table over JDBC (connection props from ConfigHandler).
    val sourceFrame =
      sqlContext.read.jdbc(ConfigHandler.url, "industry_type", ConfigHandler.props)

    // Convert each Row into a typed record for indexing.
    val typedRecords = sourceFrame.map { r =>
      AppmIndustryType(
        r.getAs[String]("id"),
        r.getAs[String]("father_code"),
        r.getAs[String]("code"),
        r.getAs[String]("name"),
        r.getAs[String]("description")
      )
    }

    // Write to Elasticsearch; the record's "id" field becomes the document _id,
    // so re-runs upsert instead of duplicating documents.
    EsSpark.saveToEs(
      typedRecords.rdd,
      "appm_industry_type/appm_industry_type",
      Map("es.mapping.id" -> "id")
    )

    sparkContext.stop()

  }

}
