package cn.tecnova.Synchronous

import java.text.SimpleDateFormat
import java.util.Date

import cn.tecnova.bean.{AppmChinaCitys, AppmIndustryType}
import cn.tecnova.utils.{ConfigHandler, ESUtils}
import com.alibaba.fastjson.JSON
import com.google.gson.Gson
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.elasticsearch.action.update.UpdateResponse
import org.elasticsearch.client.transport.TransportClient
import org.elasticsearch.common.xcontent.XContentFactory
import org.elasticsearch.index.query.QueryBuilders
import org.elasticsearch.search.SearchHit

/**
  * description: Synchronizes AppmChinaCitys data (inserts, deletes, updates) from Kafka into Elasticsearch.
  **/
object AppmChinaCitys2Es {

  import scala.util.control.NonFatal

  Logger.getLogger("org").setLevel(Level.ERROR)

  // ES index name; the mapping type deliberately reuses the same value.
  private val EsIndex = "appm_china_citys"

  /**
    * Entry point. args(0) = spark.streaming.kafka.maxRatePerPartition,
    * args(1) = batch interval in seconds.
    *
    * Consumes CDC messages from the "appm_china_citys" topic; each message is a
    * JSON object with a "flag" field ("del" | "ins" | "modi") plus either a
    * "pkey" array (primary keys to delete) or a "data" array (full rows).
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("spark.streaming.kafka.maxRatePerPartition", args(0))
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .set("es.index.auto.create", "true")
      .set("es.nodes", ConfigHandler.esNodes)
      .set("es.port", ConfigHandler.esPort)
      .set("es.nodes.wan.only", "true")

    val sc = new SparkContext(conf)

    val ssc = new StreamingContext(sc, Seconds(args(1).toInt))

    val groupid = "g_appmChinaCitys2Es"

    // Direct stream over the Kafka topic; offsets are managed manually below.
    val allData: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent, // spread partitions evenly across executors
      ConsumerStrategies.Subscribe[String, String](Array("appm_china_citys"), ConfigHandler.kafkaParams(groupid))
    )

    allData.foreachRDD(rdd => {

      // Capture this batch's offset ranges before any processing.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      if (!rdd.isEmpty()) {

        rdd.foreachPartition(iter => {

          val client: TransportClient = ESUtils.getEsClient()
          val gson = new Gson()
          // SimpleDateFormat is not thread-safe, but one instance per partition is
          // safe and avoids re-creating it for every log line.
          val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

          try {
            iter.foreach(record => {

              println("接收到kafka数据为：" + record.value())

              // BUGFIX: the original try/catch wrapped the whole partition, so one
              // malformed record silently skipped every remaining record while the
              // batch offsets were still committed. Catch per record instead so a
              // bad message only loses itself.
              try {
                processRecord(client, gson, sdf, record.value())
              } catch {
                case NonFatal(e) => e.printStackTrace()
              }
            })
          } catch {
            // Preserve the original best-effort policy for partition-level
            // failures (e.g. the iterator itself throwing): log and move on.
            case NonFatal(e) => e.printStackTrace()
          } finally {
            // Release the ES client even if the partition failed part-way.
            if (client != null) client.close()
          }
        })
      }

      // Commit this batch's offsets (at-least-once; records that failed above are
      // logged but not retried). NOTE(review): confirm this best-effort policy is
      // intended — failed records are permanently skipped once committed.
      allData.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    })

    ssc.start()
    ssc.awaitTermination()
  }

  /**
    * Dispatch one Kafka message to the delete / insert / update handler
    * according to its "flag" field.
    */
  private def processRecord(client: TransportClient, gson: Gson, sdf: SimpleDateFormat, value: String): Unit = {

    val jsonObj = JSON.parseObject(value)
    val flag = jsonObj.getString("flag")

    if ("del".equals(flag)) handleDelete(client, sdf, jsonObj)
    if ("ins".equals(flag)) handleInsert(client, gson, sdf, jsonObj)
    if ("modi".equals(flag)) handleUpdate(client, gson, sdf, jsonObj)
  }

  /**
    * Delete: for each primary key in "pkey", resolve the matching ES document(s)
    * by the business "id" field, then delete each by its ES document id.
    */
  private def handleDelete(client: TransportClient, sdf: SimpleDateFormat, jsonObj: com.alibaba.fastjson.JSONObject): Unit = {

    val pkeyArr = jsonObj.getJSONArray("pkey")
    println("===== 进入删除数据部分 ===== 删除数据" + pkeyArr.size + "条")

    for (index <- 0 until pkeyArr.size()) {

      val pkey = pkeyArr.getString(index)
      println("要删除的数据id为" + pkey)

      // The business key "id" is not the ES _id, so search first to resolve it.
      val sr = client.prepareSearch(EsIndex).setTypes(EsIndex)
        .setQuery(QueryBuilders.boolQuery()
          .must(QueryBuilders.matchPhraseQuery("id", pkey)))
        .execute()
        .actionGet()
      val hits: Array[SearchHit] = sr.getHits.getHits

      for (hit <- hits) {
        val response = client.prepareDelete(EsIndex, EsIndex, hit.getId).get()
        println("删除第" + (index + 1) + "条数据返回结果:" + response.getResult + "--" + sdf.format(new Date))
      }
    }
  }

  /** Insert: deserialize each element of "data" and index it as a new document. */
  private def handleInsert(client: TransportClient, gson: Gson, sdf: SimpleDateFormat, jsonObj: com.alibaba.fastjson.JSONObject): Unit = {

    val dataArr = jsonObj.getJSONArray("data")
    println("===== 进入新增数据部分 ===== 新增数据" + dataArr.size + "条")

    for (index <- 0 until dataArr.size()) {

      val dataJson = dataArr.getJSONObject(index).toString
      println("新增数据为：" + dataJson)
      val appmChinaCitys: AppmChinaCitys = gson.fromJson(dataJson, classOf[AppmChinaCitys])

      val response = client.prepareIndex(EsIndex, EsIndex)
        .setSource(buildSource(appmChinaCitys))
        .get()
      println("新增第" + (index + 1) + "条数据返回结果:" + response.getResult + "--" + sdf.format(new Date))
    }
  }

  /**
    * Update: deserialize each element of "data", look up the existing ES
    * document(s) by the business "id" field, and overwrite every field.
    */
  private def handleUpdate(client: TransportClient, gson: Gson, sdf: SimpleDateFormat, jsonObj: com.alibaba.fastjson.JSONObject): Unit = {

    val dataArr = jsonObj.getJSONArray("data")
    println("===== 进入修改数据部分 ===== 修改数据" + dataArr.size + "条")

    for (index <- 0 until dataArr.size()) {

      val dataJson = dataArr.getJSONObject(index).toString
      println("要修改的数据为：" + dataJson)

      val appmChinaCitys: AppmChinaCitys = gson.fromJson(dataJson, classOf[AppmChinaCitys])

      // Resolve the ES document id(s) for this row's business key.
      val sr = client.prepareSearch(EsIndex).setTypes(EsIndex)
        .setQuery(QueryBuilders.boolQuery()
          .must(QueryBuilders.matchPhraseQuery("id", appmChinaCitys.id)))
        .execute()
        .actionGet()
      val hits: Array[SearchHit] = sr.getHits.getHits

      for (hit <- hits) {

        val response: UpdateResponse = client.prepareUpdate(EsIndex, EsIndex, hit.getId)
          .setDoc(buildSource(appmChinaCitys))
          .get()

        println("更新第" + (index + 1) + "条数据结果:" + response.getResult + "--" + sdf.format(new Date))
      }
    }
  }

  /**
    * Serialize one AppmChinaCitys row into the JSON source shared by the
    * insert (prepareIndex) and update (setDoc) paths.
    */
  private def buildSource(c: AppmChinaCitys) =
    XContentFactory.jsonBuilder()
      .startObject()
      .field("id", c.id)
      .field("cityId", c.cityId)
      .field("superiorId", c.superiorId)
      .field("cityName", c.cityName)
      .field("type", c.`type`)
      .field("country", c.country)
      .field("level", c.level)
      .field("islast", c.islast)
      .field("foreignName", c.foreignName)
      .endObject()
}
