package cn.tecnova.test

import cn.tecnova.utils.ConfigHandler
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * description: Spark batch job that loads the `fin_insur_claims_injury` table
  * over JDBC and bulk-writes it into an Elasticsearch index of the same name.
  **/
object Jury2ES {

  // Silence Spark's verbose INFO/WARN output; only errors from "org" loggers show.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
    * Entry point: reads the `fin_insur_claims_injury` table via JDBC, maps each
    * generic Row onto the typed `Fin_insur_claims_injury` case class, and writes
    * the resulting Dataset to Elasticsearch with the elasticsearch-spark connector.
    *
    * @param args unused; connection settings come from `ConfigHandler` and the
    *             hard-coded ES node list below.
    */
  def main(args: Array[String]): Unit = {

    // Index and type share the same name; keep it in one place so they stay in sync.
    val esIndex = "fin_insur_claims_injury"

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local[*]")
      .set("es.index.auto.create", "true")
      .set("es.nodes", "192.168.100.3,192.168.100.6")
      .set("es.port", "9200")
      // Client cannot reach internal data-node addresses; route all traffic
      // through the declared nodes only.
      .set("es.nodes.wan.only", "true")

    val sc = new SparkContext(conf)
    // NOTE(review): SQLContext is deprecated since Spark 2.0 — consider migrating
    // to SparkSession when the rest of the codebase moves.
    val sqLContext = new SQLContext(sc)

    try {
      // Required for the implicit Encoder that Dataset.map on the case class needs.
      import sqLContext.implicits._
      val userSubjectDF = sqLContext.read.jdbc(ConfigHandler.url, "fin_insur_claims_injury", ConfigHandler.props)

      // Brings saveToEs into scope (elasticsearch-spark SQL integration).
      import org.elasticsearch.spark.sql._

      // Map untyped Rows to the case class so field names and types are fixed
      // before indexing.
      // NOTE(review): getAs[Int]/getAs[Double] throw on NULL columns (primitive
      // unboxing) — confirm incId/injurySketch/policyType/state/reimburseAmount
      // are NOT NULL in the source table.
      val res2 = userSubjectDF.map(row => {
        Fin_insur_claims_injury(
          row.getAs[String]("id"),
          row.getAs[Int]("incId"),
          row.getAs[String]("finClaimsId"),
          row.getAs[String]("woundedName"),
          row.getAs[String]("woundedTel"),
          row.getAs[String]("woundedIdCard"),
          row.getAs[Int]("injurySketch"),
          row.getAs[Int]("policyType"),
          row.getAs[String]("creator"),
          row.getAs[String]("modifier"),
          row.getAs[Int]("state"),
          row.getAs[Double]("reimburseAmount")
        )
      })

      // Write to "<index>/<type>" (index and type deliberately share one name).
      res2.saveToEs(s"$esIndex/$esIndex")
    } finally {
      // Fix: stop was previously unconditional-on-success only; a failure in the
      // read/map/write leaked the SparkContext. Always release it.
      sc.stop()
    }

  }

}
