package com.atguigu.gmall.realtime.handler


import com.atguigu.gmall.realtime.util.{MyEsUtil, MyPropertiesUtil, OffsetManager}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, OffsetRange}

import scala.collection.mutable
import scala.reflect.ClassTag
import org.elasticsearch.spark._

import java.time.LocalDate
import java.util.Properties
object EsHandler {

  // ES connection endpoint, read once from the project's properties file at class load.
  private val esHost: String = MyPropertiesUtil.getProperty("es.host")
  private val esPort: String = MyPropertiesUtil.getProperty("es.port")

  /**
   * Writes every micro-batch of `dStream` into a daily Elasticsearch index and then
   * commits the Kafka offsets for that batch.
   *
   * The target index is `indexPrefix` + the current date (one index per day), and
   * `keyField` is mapped to the document `_id`, so re-processing the same records
   * overwrites rather than duplicates them (idempotent writes).
   *
   * @param dStream          stream whose elements are indexed as ES documents
   * @param indexPrefix      prefix of the daily index name (current date is appended)
   * @param keyField         name of the field used as the document `_id`
   * @param commitOffsetFunc callback invoked after each batch has been handed to ES,
   *                         so offsets are committed only once the data is persisted
   *                         (at-least-once delivery). NOTE(review): the type is
   *                         `Unit => Unit` rather than the idiomatic `() => Unit`;
   *                         kept as-is for caller compatibility.
   */
  def saveDStreamToEs[T: ClassTag](dStream: DStream[T],
                                   indexPrefix: String,
                                   keyField: String,
                                   commitOffsetFunc: Unit => Unit
                                  ): Unit = {

    // ES-Hadoop connector settings; reference:
    // https://www.elastic.co/guide/en/elasticsearch/hadoop/7.8/configuration.html#cfg-mapping
    // Built once here rather than per micro-batch — nothing in it changes between batches.
    val confMap = Map(
      "es.nodes" -> esHost,
      "es.port" -> esPort,
      "es.index.auto.create" -> "true",   // create the index automatically if missing
      "es.nodes.wan.only" -> "true",      // talk only to the configured node (host name / WAN access)
      "es.batch.size.entries" -> "2000",  // bulk-write flush size
      "es.mapping.id" -> keyField         // primary-key column -> document _id, for idempotence
    )

    dStream.foreachRDD { rdd =>
      // One index per day, e.g. "<prefix>2024-01-31"; LocalDate.now() is evaluated
      // on the driver for each batch so the date rolls over at midnight.
      rdd.saveToEs(s"${indexPrefix}${LocalDate.now()}/_doc", confMap)
      // Commit offsets only after the batch was submitted to ES (at-least-once).
      // Explicit Unit argument avoids the deprecated empty-argument-list
      // adaptation that `commitOffsetFunc()` would rely on.
      commitOffsetFunc(())
    }
  }

}
