package KafkaSink

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.util.Properties

import org.apache.flink.api.common.ExecutionConfig
import org.apache.flink.api.common.serialization.TypeInformationSerializationSchema
import org.apache.flink.api.java.typeutils.{GenericTypeInfo, ValueTypeInfo}
import org.apache.flink.api.java.typeutils.runtime.ValueSerializer
import org.apache.flink.api.java.typeutils.runtime.kryo.KryoSerializer
import org.apache.flink.runtime.state.JavaSerializer
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer
import org.apache.kafka.clients.producer.ProducerConfig

/**
 *
 * @author Spring_Hu
 * @date 2021/10/11 11:27
 */
object KafkaSink extends Base {

  /**
   * Flink job entry point: reads comma-separated `WaterSensor` records
   * (`id,ts,vc`) from a socket on hadoop102:8888 and writes them to the
   * Kafka topic "ff" on hadoop102:9092.
   */
  def main(args: Array[String]): Unit = {

    run {

      val sensorStream = env.addSource(new SourceFunction[WaterSensor] {
        // Cooperative cancellation flag. The original `while (true)` ignored
        // cancel(), so the source could never be stopped cleanly by Flink.
        @volatile private var running = true

        override def run(ctx: SourceFunction.SourceContext[WaterSensor]): Unit = {
          val socket = new Socket("hadoop102", 8888)
          val reader = new BufferedReader(new InputStreamReader(socket.getInputStream))
          try {
            while (running) {
              val line = reader.readLine()
              if (line == null) {
                // Peer closed the connection: readLine() returned null.
                // Stop instead of throwing an NPE on split().
                running = false
              } else {
                val fields = line.split(",")
                // Skip malformed lines rather than failing the whole job
                // with an ArrayIndexOutOfBoundsException on one bad record.
                if (fields.length >= 3) {
                  ctx.collect(new WaterSensor(fields(0), Integer.parseInt(fields(1)), Integer.parseInt(fields(2))))
                }
              }
            }
          } finally {
            // Always release the socket resources, on cancellation or failure.
            reader.close()
            socket.close()
          }
        }

        override def cancel(): Unit = {
          running = false
        }
      })

      // Producer configuration: broker address and acks=1 (leader-only ack).
      val properties = new Properties()
      properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop102:9092")
      properties.setProperty(ProducerConfig.ACKS_CONFIG, "1")

      // Serialize WaterSensor with Java serialization and sink to topic "ff".
      sensorStream.addSink(new FlinkKafkaProducer[WaterSensor]("ff",
        new TypeInformationSerializationSchema[WaterSensor](new GenericTypeInfo(classOf[WaterSensor]),
          new JavaSerializer[WaterSensor]), properties))
    }
  }
}
