package com.gitee.etl.process.bean
import com.gitee.utils.KafkaProp
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment,_}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011

abstract class MQETL(envs: StreamExecutionEnvironment) extends BeanETL[String] {
  /**
    * Builds a Kafka 0.11 consumer for the given topic and attaches it to the
    * execution environment as a source of raw String records.
    *
    * @param topic the Kafka topic to consume from
    * @return a [[DataStream]] of the raw String messages read from Kafka
    */
  override def getKafkaDataStream(topic: String): DataStream[String] = {
    // Records are deserialized as plain strings; broker/group settings come
    // from the shared KafkaProp configuration object.
    val consumer = new FlinkKafkaConsumer011[String](topic, new SimpleStringSchema(), KafkaProp.getProperties())
    envs.addSource(consumer)
  }

  /**
    * Hook for concrete subclasses to implement the actual ETL processing of
    * the stream.
    */
  // Explicit ": Unit" replaces the deprecated procedure syntax
  // ("def process();"), which is removed in Scala 3.
  override def process(): Unit
}
