package com.central.resident.batch_job

import java.util.Properties

import com.central.utils.LoadESData
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.elasticsearch.spark.rdd.EsSpark

object TestES {

  /** Projects every ES document in `rdd` down to (field value as string, field name)
    * and de-duplicates. A document that lacks `field` yields an empty code instead
    * of crashing the job (the original `Option.get` threw NoSuchElementException
    * on a missing key). A present-but-null value still maps to "null", matching
    * the original `value + ""` behavior.
    */
  private def distinctCodes(
      rdd: RDD[(String, collection.Map[String, AnyRef])],
      field: String): RDD[(String, String)] =
    rdd
      .map { case (_, doc) => (doc.get(field).fold("")(_ + ""), field) }
      .distinct()

  def main(args: Array[String]): Unit = {
    // JDBC connection credentials.
    // NOTE(review): hard-coded root/root — move into the properties file
    // alongside the Elasticsearch settings.
    val pro = new Properties()
    pro.put("user", "root")
    pro.put("password", "root")

    val properties = LoadESData.loadProperties()

    val spark = SparkSession
      .builder()
      //      .master("spark://192.168.5.180:7077")
      .master("local[6]")
      .config("es.nodes",
        properties.getProperty("elasticsearch.host", "192.168.5.180"))
      .config("es.port", properties.getProperty("elasticsearch.port", "9200"))
      // Return ES date fields as plain strings rather than rich date objects.
      .config("es.mapping.date.rich", "false")
      .getOrCreate()

    // match_all: pull every document from the index. (val, not var — the
    // query is never reassigned; stripMargin now has actual `|` margins.)
    val query =
      """{
        |  "query": { "match_all": {} }
        |}""".stripMargin

    val esRdd: RDD[(String, collection.Map[String, AnyRef])] =
      EsSpark.esRDD(spark.sparkContext, "stand_wifi_resident/wifi", query)
    import spark.implicits._

    // Cached because the RDD is traversed three times: imsi pass, mac pass,
    // and the final count below.
    val rdd = esRdd.cache()

    // Distinct imsi codes and distinct mac codes, appended to MySQL as
    // (code, code_type) rows.
    distinctCodes(rdd, "imsi")
      .union(distinctCodes(rdd, "mac"))
      .map { case (code, codeType) => saveResident(code, codeType) }
      .toDF()
      .write
      .mode(SaveMode.Append)
      .jdbc("jdbc:mysql://192.168.5.180:3306/ga", "t_resident_test", pro)

    println(rdd.count() + "======2")
  }
}

// Row schema for the t_resident_test JDBC sink: `code` holds the raw
// imsi/mac value pulled from Elasticsearch and `code_type` names which
// field it came from ("imsi" or "mac").
// NOTE(review): Scala convention is UpperCamelCase for case classes
// (SaveResident); renaming requires updating the usage in TestES.main.
case class saveResident(
                         code: String,
                         code_type: String
                       )