package com.edata.bigdata.viewmain

import com.edata.bigdata.basic.Manager
import com.edata.bigdata.bean.Event
import com.edata.bigdata.flink.kafka.FKSource
import com.edata.bigdata.flink.postgres.FPSink
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala._


object testing {

  /**
   * Entry point for the Flink Kafka example: creates a Kafka source via the
   * project's [[Manager]] factory, prints the consumed stream, and starts the job.
   *
   * Optional CLI overrides (all default to the original hard-coded dev values):
   *   - args(0): Kafka broker list (default "192.168.36.128:9092")
   *   - args(1): topic            (default "edata")
   *   - args(2): consumer groupId (default "edata")
   */
  def main(args: Array[String]): Unit = {
    // Use lift to read optional positional args safely (no IndexOutOfBounds).
    val brokers = args.lift(0).getOrElse("192.168.36.128:9092")
    val topic   = args.lift(1).getOrElse("edata")
    val groupId = args.lift(2).getOrElse("edata")

    val m = Manager(this.getClass)

    /** ******************************* Create Kafka Source ******************************* */
    // createSource returns a base source type; the "FKSOURCE" key is expected to
    // yield a FKSource[String], hence the downcast.
    val fks = m.createSource[String]("FKSOURCE")
      .asInstanceOf[FKSource[String]]
    fks.brokers = brokers
    fks.topics = topic
    // NOTE(review): "deserialzationSchema" is misspelled in the FKSource API
    // (should be "deserializationSchema") — fix upstream, not here.
    fks.deserialzationSchema = new SimpleStringSchema()
    // Start from the earliest available offsets on first run.
    fks.offsetMode = OffsetsInitializer.earliest()
    fks.groupId = groupId
    // Field assignments above must happen before createDataStream() builds the stream.
    fks.createDataStream()
    fks.dataStream.print()

//    val es = fks.dataStream.map(value => {
//      value.split(",")
//    })

    /** ******************************* Create JDBC Sink (disabled) ******************************* */
//    val fps = m.createSink[Event]("FPSINK")
//      .asInstanceOf[FPSink[Event]]
//    fps.sink(es, "id")

    // Submits the job; blocks until the streaming job terminates.
    fks.start("Flink Kafka Example")
  }
}