package com.sunzm.flink.datastream.scala.cdc

import com.alibaba.fastjson.JSONObject
import com.ververica.cdc.debezium.DebeziumDeserializationSchema
import io.debezium.data.Envelope
import io.debezium.data.Envelope.FieldName
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}
import org.apache.flink.util.Collector
import org.apache.kafka.connect.data.{Schema, Struct}
import org.apache.kafka.connect.source.SourceRecord

/**
 * Custom Debezium deserialization schema that converts each CDC change record
 * into a flat JSON string of the form:
 * {{{
 *   {"database": "...", "table": "...", "type": "create|update|delete|read", "data": {...}}
 * }}}
 */
class JSONStringDebeziumDeserializationSchema extends DebeziumDeserializationSchema[String] {

  /**
   * Deserializes one Debezium [[SourceRecord]] and emits a JSON string downstream.
   *
   * @param sourceRecord raw change record produced by the Debezium engine
   * @param collector    Flink collector receiving the serialized JSON string
   */
  override def deserialize(sourceRecord: SourceRecord, collector: Collector[String]): Unit = {
    // Topic carries database and table name,
    // e.g. "mysql_binlog_source.test.call_number_type_info".
    val topicParts: Array[String] = sourceRecord.topic().split("\\.")
    // Guard against malformed/short topics (fewer than 3 segments) instead of
    // failing with ArrayIndexOutOfBoundsException, as arr(1)/arr(2) would.
    val db = topicParts.lift(1).getOrElse("")
    val tableName = topicParts.lift(2).getOrElse("")

    // The record value is a Kafka Connect Struct holding the Debezium envelope.
    val value: Struct = sourceRecord.value().asInstanceOf[Struct]
    // "after" image: row state after the change; null for DELETE events.
    val after: Struct = value.getStruct(FieldName.AFTER)

    // The SOURCE struct would also expose db/table if needed:
    // val source = value.getStruct(FieldName.SOURCE)

    // Collect the row's column values into a JSON object (empty for deletes).
    val data = new JSONObject()
    if (after != null) {
      val schema: Schema = after.schema()

      import scala.collection.JavaConverters._

      schema.fields.asScala.foreach { field =>
        data.put(field.name(), after.get(field))
      }
    }

    // Operation type: READ / DELETE / UPDATE / CREATE.
    val operation: Envelope.Operation = Envelope.operationFor(sourceRecord)

    // Assemble the final envelope carried to downstream operators.
    val result = new JSONObject()
    result.put("database", db)
    result.put("table", tableName)
    result.put("type", operation.toString.toLowerCase)
    result.put("data", data)

    // Emit the JSON string downstream.
    collector.collect(result.toJSONString())
  }

  /** Tells Flink the type produced by this schema (plain strings). */
  override def getProducedType: TypeInformation[String] = BasicTypeInfo.STRING_TYPE_INFO
}
