package cn.getech.data.development.source

import java.util.{Properties, UUID}

import cn.getech.data.development.task.FlinkStreamSyncHiveMain.obj
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.table.descriptors.Kafka
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.KafkaProducer

trait FlinkSourceTrait[T] {

  /**
   * Builds a `KafkaProducer[String, String]` pointed at the given bootstrap servers.
   *
   * Bug fixed: the previous version registered its serializer classes under the
   * consumer *deserializer* keys (`key.deserializer` / `value.deserializer`) and
   * never set the mandatory `key.serializer` / `value.serializer`, so constructing
   * the producer failed at runtime with
   * "Missing required configuration \"key.serializer\"". It also carried
   * consumer-only settings (`group.id`, `enable.auto.commit`,
   * `auto.commit.interval.ms`, `auto.offset.reset`) that a producer ignores;
   * those are dropped here.
   *
   * @param topic            target topic; not used while building the producer —
   *                         kept for interface compatibility (callers supply it
   *                         again when sending records)
   * @param bootstrapServers Kafka bootstrap server list; defaults to the
   *                         previously hard-coded test cluster so existing
   *                         callers are unaffected
   * @return a configured producer with String key/value serializers
   */
  def kafkaProducer(topic: String,
                    bootstrapServers: String =
                      "bigdata-test-4:9092,bigdata-test-5:9092,bigdata-test-6:9092"): KafkaProducer[String, String] = {
    val props = new Properties()
    props.setProperty("bootstrap.servers", bootstrapServers)
    // Producers require *serializers* (ProducerConfig.KEY/VALUE_SERIALIZER_CLASS_CONFIG);
    // string-literal keys are used here to avoid adding a new import.
    props.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    // Return the producer built from the assembled configuration.
    new KafkaProducer[String, String](props)
  }

  /** The Kafka-backed input stream of elements of type `T`; implementors define the source. */
  def getKafkaDataStream: DataStream[T]

  /** Runs the pipeline. Explicit `Unit` return type replaces deprecated procedure syntax. */
  def process(): Unit
}
