package com.air.antispider.stream.dataprocess.businessprocess

import com.air.antispider.stream.commom.bean.ProcessedData
import com.air.antispider.stream.commom.util.jedis.PropertiesUtil
import com.air.antispider.stream.dataprocess.constants.BehaviorTypeEnum
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.rdd.RDD

object SendData {

  /**
    * Builds the Kafka producer configuration shared by all senders in this object.
    * All values come from kafkaConfig.properties:
    * brokers, key/value serializers, and batching (32KB batch size or 100ms linger).
    *
    * @return a java.util.HashMap suitable for constructing a KafkaProducer
    */
  private def buildProducerProps(): java.util.HashMap[String, Object] = {
    val props = new java.util.HashMap[String, Object]()
    // broker list
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, PropertiesUtil.getStringByKey("default.brokers", "kafkaConfig.properties"))
    // key serializer class
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, PropertiesUtil.getStringByKey("default.key_serializer_class_config", "kafkaConfig.properties"))
    // value serializer class
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, PropertiesUtil.getStringByKey("default.value_serializer_class_config", "kafkaConfig.properties"))
    // batching: flush a batch when it reaches batch.size bytes or linger.ms elapses
    props.put(ProducerConfig.BATCH_SIZE_CONFIG, PropertiesUtil.getStringByKey("default.batch_size_config", "kafkaConfig.properties"))
    props.put(ProducerConfig.LINGER_MS_CONFIG, PropertiesUtil.getStringByKey("default.linger_ms_config", "kafkaConfig.properties"))
    props
  }

  /**
    * Sends every record of the RDD to the given Kafka topic.
    *
    * One KafkaProducer is created per partition (on the executor, since
    * KafkaProducer is not serializable) and is always closed via try/finally,
    * so buffers and network threads are released even if a send throws.
    *
    * @param dataRDD the records to send
    * @param topic   the target Kafka topic
    */
  private def sendToKafka(dataRDD: RDD[ProcessedData], topic: String): Unit = {
    val props = buildProducerProps()
    dataRDD.foreachPartition(partition => {
      // one producer per partition; created executor-side
      val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](props)
      try {
        partition.foreach(message => {
          // serialize each record to the kafka wire format
          val data: String = message.toKafkaString()
          producer.send(new ProducerRecord[String, String](topic, data))
        })
      } finally {
        // close() also flushes pending batches; finally guarantees no leak on failure
        producer.close()
      }
    })
  }

  /**
    * Filters out booking (Book) records and sends them to Kafka.
    * Target topic key: target.book.topic = processedBook
    *
    * @param processedDataRDD the full processed-data stream
    */
  def sendBook(processedDataRDD: RDD[ProcessedData]): Unit = {
    // keep only booking behavior records
    val bookRDD: RDD[ProcessedData] = processedDataRDD.filter(_.requestType.behaviorType == BehaviorTypeEnum.Book)
    val bookTopic = PropertiesUtil.getStringByKey("target.book.topic", "kafkaConfig.properties")
    sendToKafka(bookRDD, bookTopic)
  }

  /**
    * Filters out query (Query) records and sends them to Kafka.
    * Target topic key: target.query.topic = processedQuery
    *
    * @param processedDataRDD the full processed-data stream
    */
  def sendQuery(processedDataRDD: RDD[ProcessedData]): Unit = {
    // keep only query behavior records
    val queryRDD: RDD[ProcessedData] = processedDataRDD.filter(_.requestType.behaviorType == BehaviorTypeEnum.Query)
    val queryTopic = PropertiesUtil.getStringByKey("target.query.topic", "kafkaConfig.properties")
    sendToKafka(queryRDD, queryTopic)
  }

}
