package cn.tecnova.test

import cn.tecnova.utils.ConfigHandler
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * description:
  * Reads the `fin_insur_claims_threecar` table over JDBC, maps every row onto
  * the [[Fin_insur_claims_threecar]] case class, and bulk-writes the result to
  * the Elasticsearch index/type `fin_insur_claims_threecar/fin_insur_claims_threecar`.
  * Rabcheng
  * Date:2019/5/30 11:02
  **/
object Fin_insur_claims_threecar2es {

  // Silence Spark's own logging; only errors are shown.
  Logger.getLogger("org").setLevel(Level.ERROR)

  // Source JDBC table and target ES index share this name.
  private val TableName = "fin_insur_claims_threecar"

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .set("es.index.auto.create", "true")
      .set("es.nodes", "192.168.100.3,192.168.100.6")
      .set("es.port", "9200")
      .set("es.nodes.wan.only", "true")
    // Default to local[*] only when no master was supplied (e.g. by
    // spark-submit), so the same jar can run locally or on a cluster.
    conf.setIfMissing("spark.master", "local[*]")

    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)

      // Provides the implicit Encoder required by the typed map() below.
      import sqlContext.implicits._

      // Full-table read through JDBC using the shared connection settings.
      val userSubjectDF = sqlContext.read.jdbc(ConfigHandler.url, TableName, ConfigHandler.props)

      // Import the saveToEs extension for DataFrames/Datasets
      // (elasticsearch-hadoop connector).
      import org.elasticsearch.spark.sql._

      // Map each untyped Row onto the case class so the ES documents get
      // stable field names and types.
      // NOTE(review): getAs[Int]/getAs[Double] will NPE on NULL columns —
      // assumes these columns are NOT NULL in the source table; confirm.
      val res2 = userSubjectDF.map(row => {
        Fin_insur_claims_threecar(
          row.getAs[String]("id"),
          row.getAs[Int]("incId"),
          row.getAs[String]("finClaimsId"),
          row.getAs[String]("threePlateNumber"),
          row.getAs[String]("threeVinCode"),
          row.getAs[String]("threeDriverName"),
          row.getAs[String]("threeDriverTel"),
          row.getAs[String]("threeDriverIdCard"),
          row.getAs[String]("threeVehicleType"),
          row.getAs[Int]("policyType"),
          row.getAs[String]("creator"),
          row.getAs[String]("modifier"),
          row.getAs[String]("vinCode"),
          row.getAs[Int]("state"),
          row.getAs[Double]("reimburseAmount")
        )
      })

      // Write to "<index>/<type>" — both named after the source table.
      res2.saveToEs(TableName + "/" + TableName)
    } finally {
      // Always release the SparkContext, even when the job fails,
      // so cluster resources are not leaked.
      sc.stop()
    }
  }

}
