package cn.tecnova.Synchronous

import java.math.BigDecimal

import cn.tecnova.bean.AppmChinaCitys
import cn.tecnova.utils.ConfigHandler
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * One-shot Spark job that synchronizes the relational table `china_citys`
  * into the Elasticsearch index `appm_china_citys/appm_china_citys`.
  *
  * Connection settings (JDBC url/props, ES nodes/port) come from
  * [[cn.tecnova.utils.ConfigHandler]].
  */
object ChinaCitys2Es {

  // Silence Spark's verbose INFO logging; keep only errors.
  Logger.getLogger("org").setLevel(Level.ERROR)

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("es.index.auto.create", "true")
      .set("es.nodes", ConfigHandler.esNodes)
      .set("es.port", ConfigHandler.esPort)
      .set("es.nodes.wan.only", "true")

    val sc = new SparkContext(conf)
    // Ensure the SparkContext is released even if the sync fails part-way.
    try {
      val sqlContext = new SQLContext(sc)
      import sqlContext.implicits._

      // Load the source table over JDBC.
      val chinaCitysDF = sqlContext.read.jdbc(ConfigHandler.url, "china_citys", ConfigHandler.props)

      // Bring the ES writer (saveToEs) and the Int helper (toStringPlusInt) into scope.
      import cn.tecnova.bean.IntString._
      import org.elasticsearch.spark.sql._

      val citiesDF = chinaCitysDF.rdd.map { row =>
        AppmChinaCitys(
          row.getAs[String]("id"),
          // Null-safe conversion: a NULL CITY_ID/SUPERIOR_ID previously caused
          // an NPE on .toString(); propagate null instead so the row survives.
          Option(row.getAs[BigDecimal]("CITY_ID")).map(_.toString).orNull,
          Option(row.getAs[BigDecimal]("SUPERIOR_ID")).map(_.toString).orNull,
          row.getAs[String]("CITY_NAME"),
          row.getAs[String]("TYPE"),
          row.getAs[String]("COUNTRY"),
          // NOTE(review): getAs[Int] will NPE if LEVEL/ISLAST can be NULL in the
          // source table — confirm the columns are NOT NULL or guard like above.
          row.getAs[Int]("LEVEL").toStringPlusInt,
          row.getAs[Int]("ISLAST").toStringPlusInt,
          row.getAs[String]("FOREIGN_NAME"))
      }.toDF()

      // Write to Elasticsearch index/type appm_china_citys/appm_china_citys.
      citiesDF.saveToEs("appm_china_citys" + "/appm_china_citys")
    } finally {
      sc.stop()
    }
  }

}
