package cn.bawei.shop.realtime.etl.process

import cn.bawei.canal.bean.RowData
import cn.bawei.shop.realtime.etl.`trait`.MysqlBaseETL
import cn.bawei.shop.realtime.etl.bean.OrderDBEntity
import cn.bawei.shop.realtime.etl.utils.GlobalConfigUtil
import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializerFeature
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.kafka.clients.producer.KafkaProducer

/**
 * Order ETL job.
 *
 * Pipeline:
 *   1. Consume MySQL binlog messages ([[RowData]]) from Kafka via the inherited source.
 *   2. Keep only rows for table "itcast_orders" whose event type is "insert".
 *   3. Convert each surviving RowData into an [[OrderDBEntity]].
 *   4. Serialize the entity to a JSON string (circular-reference detection disabled
 *      so fastjson emits plain JSON rather than "$ref" placeholders).
 *   5. Sink the JSON strings to the Kafka topic configured as `output.topic.order`
 *      (the dwd_order topic).
 *
 * @param env the Flink streaming execution environment this job runs on
 */
class OrderETL(env:StreamExecutionEnvironment) extends MysqlBaseETL(env){
  /**
   * Builds and wires the order-processing dataflow. Side effects only:
   * registers the filter/map/sink operators on `env` and taps the raw and
   * parsed streams to stderr for debugging.
   */
  override def process(): Unit = {
    // Raw binlog change stream from Kafka (inherited source; note the
    // method name is spelled "DataSream" upstream).
    val binlogStream = getKafkaDataSream()
    binlogStream.printToErr("yuanshuju>>>>>>")

    // Restrict to INSERTs on the orders table, map to the domain entity,
    // then serialize to JSON — all fused into one chained pipeline.
    val orderJsonStream = binlogStream
      .filter(row => row.getTableName.equals("itcast_orders") && row.getEventType.equals("insert"))
      .map(row => OrderDBEntity(row))
      .map(entity => JSON.toJSONString(entity, SerializerFeature.DisableCircularReferenceDetect))

    // Debug tap on the parsed/serialized orders.
    orderJsonStream.printToErr("订单解析数据》》")

    // Publish to the dwd_order topic configured in GlobalConfigUtil.
    orderJsonStream.addSink(kafkaProducer(GlobalConfigUtil.`output.topic.order`))
  }
}
