package com.chenjj.bigdata.kafka.scala.test

import java.time.Duration
import java.util
import java.util.Properties

import com.chenjj.bigdata.kafka.LoginFI
import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer}
import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerRecord, RecordMetadata}
import org.apache.kafka.common.security.auth.Login
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.junit.Test
import sun.rmi.runtime.Log

import scala.io.Source
import scala.tools.nsc.interactive.tests.Tester

class Tester {

  /**
    * Streams every line of `data.txt` (located at the classpath root) to the
    * given Kafka topic, logging the outcome of each asynchronous send.
    *
    * The file handle is released in a `finally` block; the producer itself is
    * owned — and closed — by the caller.
    *
    * @param producer an already-configured producer (caller closes it)
    * @param topic    destination topic name
    */
  private def sendLines(producer: KafkaProducer[String, String], topic: String): Unit = {
    // Resolve the classpath root via this class's own loader; the previous
    // `Tester.getClass` was ambiguous given the imported nsc `Tester` (L16).
    val source = Source.fromFile(getClass.getResource("/").getPath + "data.txt")
    try {
      for (line <- source.getLines()) {
        val record = new ProducerRecord[String, String](topic, line)
        // Kafka invokes the callback with exactly one of (metadata, exception) non-null.
        producer.send(record, (metadata: RecordMetadata, exception: Exception) => {
          if (metadata != null) {
            println("发送成功")
          }
          if (exception != null) {
            println("消息发送失败")
            exception.printStackTrace()
          }
        })
      }
    } finally {
      // Source.fromFile keeps the file handle open until closed explicitly.
      source.close()
    }
  }

  /**
    * Kafka producer smoke test: publishes the lines of data.txt to topic "test"
    * on an unsecured local cluster.
    */
  @Test
  def pruducerTest(): Unit = {
    val kafkaProp = new Properties()
    kafkaProp.put("bootstrap.servers", "192.168.152.134:9092")
    kafkaProp.put("key.serializer", classOf[StringSerializer].getName)
    kafkaProp.put("value.serializer", classOf[StringSerializer].getName)

    val producer = new KafkaProducer[String, String](kafkaProp)
    try {
      sendLines(producer, "test")
    } finally {
      // close() flushes buffered records and releases network resources even if a send threw.
      producer.close()
    }
  }

  /**
    * Kafka consumer smoke test: polls topic "test" indefinitely, printing each
    * record's coordinates and payload. Runs until the test is interrupted.
    */
  @Test
  def consumerTest(): Unit = {
    val kafkaProp = new Properties()
    kafkaProp.put("bootstrap.servers", "192.168.152.134:9092")
    kafkaProp.put("group.id", "10002")
    kafkaProp.put("enable.auto.commit", "true")
    kafkaProp.put("auto.offset.reset", "earliest") // start from the earliest available message
    kafkaProp.put("key.deserializer", classOf[StringDeserializer].getName)
    kafkaProp.put("value.deserializer", classOf[StringDeserializer].getName)

    val consumer = new KafkaConsumer[String, String](kafkaProp)
    consumer.subscribe(util.Arrays.asList("test"))

    try {
      while (true) {
        val records = consumer.poll(Duration.ofSeconds(1))
        records.forEach(record => {
          // `%n` already emits a platform line break; the original "%n\n"
          // printed a spurious blank line after every record.
          printf("topic = %s ,partition = %d,offset = %d, key = %s, value = %s%n",
            record.topic(),
            record.partition,
            record.offset,
            record.key,
            record.value)
        })
      }
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      consumer.close()
    }
  }

  /**
    * Kafka producer smoke test against a secured (Kerberos / SASL_PLAINTEXT)
    * FusionInsight cluster.
    */
  @Test
  def pruducerTest4FI(): Unit = {
    // Perform the Kerberos login before any Kafka client is created.
    LoginFI.securityPrepare()

    val kafkaProp = new Properties()
    kafkaProp.setProperty("bootstrap.servers", "10.22.82.41:21007,10.22.82.42:21007,10.22.82.43:21007")
    kafkaProp.setProperty("security.protocol", "SASL_PLAINTEXT")
    kafkaProp.setProperty("sasl.kerberos.service.name", "kafka")
    kafkaProp.put("key.serializer", classOf[StringSerializer].getName)
    kafkaProp.put("value.serializer", classOf[StringSerializer].getName)

    val producer = new KafkaProducer[String, String](kafkaProp)
    try {
      sendLines(producer, "test")
    } finally {
      producer.close()
    }
  }

  /** Prints the filesystem path of the classpath root (handy for locating data.txt). */
  @Test
  def test2(): Unit = {
    println(getClass.getResource("/").getPath)
  }
}
