package com.pw.study.flink.transfrom

import com.pw.study.flink.entities.UserBean
import com.pw.study.flink.source.CustomSource
import org.apache.flink.streaming.api.scala._

object MapTF {

  /**
   * Demo entry point: reads UserBean records from the project's CustomSource
   * and applies three map-style transformations, printing each result stream.
   */
  def main(args: Array[String]): Unit = {
    // Streaming environment with operator parallelism capped at 2.
    val environment: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    environment.setParallelism(2)

    // Source stream of UserBean records.
    val userStream: DataStream[UserBean] = environment.addSource(new CustomSource())

    // Variant 1: lambda map projecting each bean into a (prefixed id, ts, vc) tuple.
    val tupleStream: DataStream[(String, Long, Double)] =
      userStream.map(bean => ("HELLO_" + bean.id, bean.ts, bean.vc))

    // Variant 2: map via a MapFunction implementation.
    val mappedStream: DataStream[UserBean] = userStream.map(new MyMap())

    // Variant 3: map via a RichMapFunction, then flatten the result back to beans.
    val richStream: DataStream[UserBean] = userStream.map(new MyRichMap()).flatMap(element => element)

    // Attach print sinks (labels kept exactly as in the original demo).
    tupleStream.print("map: ")
    mappedStream.print("map2")
    richStream.print("Rich3:")

    // Submit the job; nothing runs until execute() is called.
    environment.execute()
  }
}
