package com.desheng.bigdata.flink.deserialization

import java.lang
import java.nio.charset.StandardCharsets

import com.desheng.bigdata.flink.stream.sink.Worker
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.streaming.connectors.kafka.{KafkaDeserializationSchema, KafkaSerializationSchema}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.producer.ProducerRecord

/**
 * Symmetric Kafka (de)serialization schema for [[Worker]] records.
 *
 * Wire format: a UTF-8 CSV line `id,name,age,dept` — the same layout for both
 * directions, so a record produced by `serialize` can be read back by
 * `deserialize`.
 *
 * @param topic destination topic used when building producer records
 */
class WorkerKafkaDeSeSchema(topic: String) extends KafkaSerializationSchema[Worker]
    with KafkaDeserializationSchema[Worker] {

    /** The stream is unbounded; no element marks its end. */
    override def isEndOfStream(nextElement: Worker): Boolean = false

    /** Type information Flink needs for the deserialized elements. */
    override def getProducedType: TypeInformation[Worker] = TypeInformation.of(classOf[Worker])

    /**
     * Encodes a [[Worker]] as a UTF-8 `id,name,age,dept` CSV payload.
     *
     * Previously this used `worker.toString.getBytes`, which (for a case
     * class) yields `Worker(id,name,age,dept)` — a format `deserialize`
     * below cannot parse. Emitting the bare CSV restores the round-trip.
     * NOTE(review): assumes `Worker` exposes `id`, `name`, `age`, `dept`
     * accessors, as implied by the `Worker(id, name, age, dept)` constructor
     * call in `deserialize` — confirm against the Worker definition.
     */
    override def serialize(worker: Worker,
                           timestamp: lang.Long): ProducerRecord[Array[Byte], Array[Byte]] = {
        val payload = s"${worker.id},${worker.name},${worker.age},${worker.dept}"
        new ProducerRecord[Array[Byte], Array[Byte]](
            topic,
            null, // no key: let Kafka pick the partition
            // Explicit charset: platform-default getBytes is environment-dependent.
            payload.getBytes(StandardCharsets.UTF_8)
        )
    }

    /**
     * Parses a UTF-8 `id,name,age,dept` CSV payload back into a [[Worker]].
     *
     * @throws IllegalArgumentException if the record has fewer than 4 fields
     * @throws NumberFormatException    if `id` or `age` is not an integer
     */
    override def deserialize(record: ConsumerRecord[Array[Byte], Array[Byte]]): Worker = {
        val msg = new String(record.value(), StandardCharsets.UTF_8)
        // limit = -1 keeps a trailing empty field, so an empty dept
        // ("1,ann,30,") still yields 4 fields instead of crashing.
        val fields = msg.split(",", -1)
        require(fields.length >= 4, s"Malformed worker record: '$msg'")
        Worker(fields(0).toInt, fields(1), fields(2).toInt, fields(3))
    }
}
