package cn.tecnova.cd

import cn.tecnova.utils.ConfigHandler
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{Dataset, SQLContext}

/**
  * description:
  * jdbc:mysql://www.slave4.com:3306/tec_base2
  * company_adress_v2
  * 6785 2914
  **/
object BaseCompanyAdressv2 {
  // Silence Spark's verbose INFO/WARN logging; keep errors visible.
  Logger.getLogger("org").setLevel(Level.ERROR)

  // Source table coordinates and target ES resource, kept as named constants
  // so the values cannot silently drift apart from the header comment.
  private val JdbcUrl     = "jdbc:mysql://www.slave4.com:3306/tec_base2"
  private val SourceTable = "company_adress_v2"
  private val EsResource  = "company_adress" + "/company_adress"

  /**
    * Reads all rows of the `company_adress_v2` MySQL table over JDBC, maps
    * each row onto the [[BaseCompanyAdressv2]] case class and bulk-writes the
    * result to the `company_adress/company_adress` Elasticsearch resource.
    *
    * @param args unused
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("es.index.auto.create", "true")
      .set("es.nodes", ConfigHandler.esNodes)
      .set("es.port", ConfigHandler.esPort)
      // Required when the ES data nodes are not directly reachable and all
      // traffic must go through the declared node addresses.
      .set("es.nodes.wan.only", "true")

    val sc = new SparkContext(conf)
    try {
      val sqLContext = new SQLContext(sc)
      // Provides the implicit Encoder needed to build Dataset[BaseCompanyAdressv2].
      import sqLContext.implicits._

      val rowDF = sqLContext.read.jdbc(JdbcUrl, SourceTable, ConfigHandler.props2)

      // Brings the saveToEs extension method for DataFrames/Datasets into scope.
      import org.elasticsearch.spark.sql._

      val result: Dataset[BaseCompanyAdressv2] = rowDF.map(row =>
        BaseCompanyAdressv2(
          row.getAs[Long]("id"),
          row.getAs[String]("key_no"),
          row.getAs[String]("adress"),
          row.getAs[String]("province_code"),
          row.getAs[String]("city_code"),
          row.getAs[String]("county_code"),
          row.getAs[String]("province"),
          row.getAs[String]("city"),
          row.getAs[String]("county")
        )
      )
      result.saveToEs(EsResource)
    } finally {
      // Always release the SparkContext, even when the read or the ES write
      // throws, so the driver does not leak cluster resources.
      sc.stop()
    }
  }

}

/**
  * Immutable row model for the `company_adress_v2` MySQL table, written to
  * the `company_adress` Elasticsearch index by the companion job.
  *
  * NOTE: the `adress` spelling is intentional — it mirrors the source column
  * name, and the ES connector derives document field names from these
  * accessors by reflection, so renaming would change the index mapping.
  */
final case class BaseCompanyAdressv2(
  id: Long,
  keyNo: String,
  adress: String,
  provinceCode: String,
  cityCode: String,
  countyCode: String,
  province: String,
  city: String,
  county: String
)
