package com.raylu.util

import com.raylu.util.MyKafkaUtil.kafkaParams
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.SparkContext
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

import java.util.Properties

/**
 *
 * Description:
 *
 * Created by lucienoz on 2021/11/25.
 * Copyright © 2021 lucienoz. All rights reserved.
 */
object MyKafkaUtil {
	private val prop : Properties = PropertiesUtil.load ( "config.properties" )
	private val kafkaBrokerList : String = prop.getProperty ( "kafka.broker.list" )

	// Base consumer configuration. Kept IMMUTABLE on purpose: each stream gets its
	// own copy with the caller's group id (see getKafkaStreamOfConsumer), so two
	// streams created with different group ids can never race on a shared map —
	// the original code mutated one shared mutable map in place.
	private val kafkaParams : Map[String, Object] = Map[String, Object](
		"bootstrap.servers" -> kafkaBrokerList, // initial broker addresses used to bootstrap the cluster connection
		"key.deserializer" -> classOf[StringDeserializer],
		"value.deserializer" -> classOf[StringDeserializer],
		// identifies which consumer group this consumer belongs to (default; overridden per stream)
		"group.id" -> "gmall2021_group",
		// "latest" resets the offset to the newest available offset when none is committed
		"auto.offset.reset" -> "latest",
		// true: offsets are committed automatically in the background (data can be
		// lost/reprocessed if Kafka or the job goes down);
		// false: offsets must be maintained manually.
		// NOTE(review): the overload taking explicit `offsets` suggests manual offset
		// management is intended — confirm whether this should be false.
		"enable.auto.commit" -> (true : java.lang.Boolean)
	)

	// Lazy so that consumer-only callers never pay for (or fail on) producer
	// construction at object-initialization time.
	private lazy val kafkaProducer : KafkaProducer[String, String] = createkafkaProducer()

	/**
	 * Creates a direct Kafka input stream subscribed to `topic` under `groupId`.
	 *
	 * @param topic   topic to subscribe to
	 * @param ssc     streaming context the stream is attached to
	 * @param groupId consumer group id (replaces the configured default)
	 * @return an InputDStream of String key/value consumer records
	 */
	def getKafkaStreamOfConsumer(topic : String, ssc : StreamingContext, groupId : String) : InputDStream[ConsumerRecord[String, String]] = {
		// fresh per-call parameter map — never mutate the shared base config
		val params = kafkaParams + ("group.id" -> groupId)
		KafkaUtils.createDirectStream[String, String]( ssc, LocationStrategies.PreferConsistent,
			ConsumerStrategies.Subscribe[String, String]( Array(topic), params ) )
	}

	/**
	 * Creates a direct Kafka input stream subscribed to `topic` under `groupId`,
	 * starting from explicitly supplied partition offsets (manual offset management).
	 *
	 * @param topic   topic to subscribe to
	 * @param ssc     streaming context the stream is attached to
	 * @param groupId consumer group id (replaces the configured default)
	 * @param offsets starting offset for each topic partition
	 * @return an InputDStream of String key/value consumer records
	 */
	def getKafkaStreamOfConsumer(topic : String, ssc : StreamingContext, groupId : String, offsets : Map[TopicPartition, Long]) : InputDStream[ConsumerRecord[String, String]] = {
		// fresh per-call parameter map — never mutate the shared base config
		val params = kafkaParams + ("group.id" -> groupId)
		KafkaUtils.createDirectStream[String, String]( ssc, LocationStrategies.PreferConsistent,
			ConsumerStrategies.Subscribe[String, String]( Array(topic), params, offsets ) )
	}

	/**
	 * Builds a String/String KafkaProducer against `kafkaBrokerList` with
	 * acks=-1 (full ISR acknowledgement) and idempotence enabled, so producer
	 * retries cannot introduce duplicate records.
	 *
	 * @return a configured, connected KafkaProducer
	 */
	def createkafkaProducer() : KafkaProducer[String, String] = {
		val properties : Properties = new Properties ()
		properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBrokerList)
		properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
		properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
		// acks=-1 (== "all"): wait for every in-sync replica to acknowledge each record
		properties.put(ProducerConfig.ACKS_CONFIG, "-1")
		// idempotent producer: retries cannot duplicate records
		properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true")
		new KafkaProducer[String, String]( properties )
	}

	/**
	 * Sends a keyless message to `topic`, logging the delivery result
	 * asynchronously from the producer's callback thread.
	 *
	 * @param topic destination topic
	 * @param msg   message value
	 */
	def send(topic : String, msg : String) : Unit = {
		kafkaProducer.send(new ProducerRecord[String, String](topic, msg), new Callback {
			override def onCompletion(metadata : RecordMetadata, exception : Exception) : Unit = {
				if (exception == null)
					println(s"topic=${metadata.topic()} timestamp=${metadata.timestamp()} offset=${metadata.offset()} 	send succeeded")
				else {
					// interpolating printStackTrace() would print "()"; log the message,
					// then dump the full trace separately
					println(s"topic=${topic} msg=${msg} [:Error:]${exception.getMessage}")
					exception.printStackTrace()
				}
			}
		})
	}

	/**
	 * Sends a keyed message to `topic`, logging the delivery result
	 * asynchronously from the producer's callback thread.
	 *
	 * @param topic destination topic
	 * @param key   record key (drives partition assignment)
	 * @param msg   message value
	 */
	def send(topic : String, key : String, msg : String) : Unit = {
		kafkaProducer.send(new ProducerRecord[String, String](topic, key, msg), new Callback {
			override def onCompletion(metadata : RecordMetadata, exception : Exception) : Unit = {
				if (exception == null)
					println(s"topic=${metadata.topic()} timestamp=${metadata.timestamp()} offset=${metadata.offset()} 	send succeeded")
				else {
					// interpolating printStackTrace() would print "()"; log the message,
					// then dump the full trace separately
					println(s"topic=${topic} key=${key} msg=${msg} [:Error:]${exception.getMessage}")
					exception.printStackTrace()
				}
			}
		})
	}

	/** Blocks until all buffered records have been sent to the brokers. */
	def flush() : Unit = {
		kafkaProducer.flush()
	}

	/** Ad-hoc smoke test for the utility; prints a one-element array. */
	def main(args : Array[String]) : Unit = {
		val test = Array("test")
		println(test.mkString(","))
	}
}
