package com.ydl.learning.flink.demo

import com.alibaba.fastjson.JSON
import com.ydl.learning.flink.demo.entity.TestEntity
import com.ydl.learning.flink.demo.mySink.LogSink
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.assigners.{TumblingEventTimeWindows, TumblingProcessingTimeWindows}
import org.apache.flink.streaming.api.windowing.time.Time
import org.slf4j.{Logger, LoggerFactory}

/**
 * Parallel-processing demo.
 *
 * This class is intentionally empty: it exists only so the companion object
 * can obtain a `classOf[ParallelDemo]` handle for its SLF4J logger.
 *
 * @author ydl
 * @since 2021/5/11
 */
class ParallelDemo {}

/**
 * Entry point for the parallel-map demo.
 *
 * Reads JSON lines from a local socket (default port 9999, overridable via
 * the first CLI argument), parses them into [[TestEntity]] records, fans the
 * filtered stream out into three independent map operators of differing
 * simulated cost, then unions the branches back together and logs the result.
 *
 * Note: the `App` trait was replaced by an explicit `main` method — `App`
 * runs all logic inside the object initializer (DelayedInit), which has
 * well-known initialization-order pitfalls and is discouraged for
 * non-trivial entry points.
 */
object ParallelDemo extends Utils {
  val log: Logger = LoggerFactory.getLogger(classOf[ParallelDemo])

  // Default socket port; kept at 9999 for backward compatibility.
  private val DefaultPort = 9999

  def main(args: Array[String]): Unit = {
    // Optional first argument overrides the port; malformed input falls back
    // to the default rather than crashing the demo.
    val port = args.headOption
      .flatMap(p => scala.util.Try(p.toInt).toOption)
      .getOrElse(DefaultPort)

    // getSocketTextStream returns (execution environment, raw text stream).
    val (env, lines) = getSocketTextStream(port)

    // Parse each line as JSON and keep only records with id == 1.
    val parsed: DataStream[TestEntity] = lines.map(JSON.parseObject(_, classOf[TestEntity]))
    val filtered: DataStream[TestEntity] = parsed.filter(_.getId == 1)

    // Three independent branches over the same stream. The sleeps simulate
    // operators of different cost so parallel execution is observable in the
    // logs — demo only; never use Thread.sleep for coordination in real jobs.
    val withAge = filtered.map { entity =>
      log.debug("set age")
      entity.setAge(1)
      log.info(entity.toString)
      Thread.sleep(3000) // simulated slow operator
      entity
    }.name("map1")

    val withName = filtered.map { entity =>
      log.debug("set name")
      entity.setName("ydl")
      log.info(entity.toString)
      Thread.sleep(2000) // simulated medium operator
      entity
    }.name("map2")

    val withAddress = filtered.map { entity =>
      log.debug("set address")
      entity.setAddress("北京")
      log.info(entity.toString)
      Thread.sleep(1000) // simulated fast operator
      entity
    }.name("map3")

    // Merge the three branches and key by id so records for the same entity
    // land in the same partition before sinking.
    val merged = withAge.union(withName).union(withAddress).keyBy(_.getId)
    merged.addSink(new LogSink[TestEntity]).name("print")

    // Job name fixed: the source is a socket, not Kafka — the old name
    // ("Flink add kafka data source") was misleading in the Flink UI.
    env.execute("Flink parallel socket demo")
  }
}
